author    David Robertson <davidr@element.io>  2023-01-10 12:43:28 +0000
committer David Robertson <davidr@element.io>  2023-01-10 12:43:28 +0000
commit    04aa6a970790543e773639c821d0628e5fadaf32 (patch)
tree      48adfb5013d2a31253030a0f2508896e7c72b390
parent    Merge branch 'rei/dresync_exp' into matrix-org-hotfixes (diff)
parent    Update changelog 2 (diff)
download  synapse-04aa6a970790543e773639c821d0628e5fadaf32.tar.xz

Merge remote-tracking branch 'origin/release-v1.75' into matrix-org-hotfixes
-rw-r--r--  .flake8                                                                        |  18
-rw-r--r--  .github/workflows/dependabot_changelog.yml                                     |   7
-rw-r--r--  .github/workflows/docs-pr-netlify.yaml                                         |   2
-rw-r--r--  .github/workflows/docs-pr.yaml                                                 |  26
-rw-r--r--  .github/workflows/docs.yaml                                                    |   2
-rw-r--r--  .github/workflows/latest_deps.yml                                              |   2
-rw-r--r--  .github/workflows/release-artifacts.yml                                        |   2
-rw-r--r--  .github/workflows/tests.yml                                                    |   2
-rw-r--r--  .github/workflows/twisted_trunk.yml                                            |   2
-rw-r--r--  CHANGES.md                                                                     |  94
-rw-r--r--  Cargo.lock                                                                     |   8
-rw-r--r--  changelog.d/14263.misc                                                         |   1
-rw-r--r--  changelog.d/14545.misc                                                         |   1
-rw-r--r--  changelog.d/14546.misc                                                         |   1
-rw-r--r--  changelog.d/14644.bugfix                                                       |   1
-rw-r--r--  changelog.d/14665.misc                                                         |   1
-rw-r--r--  changelog.d/14669.bugfix                                                       |   1
-rw-r--r--  changelog.d/14672.misc                                                         |   1
-rw-r--r--  changelog.d/14673.doc                                                          |   1
-rw-r--r--  changelog.d/14674.doc                                                          |   1
-rw-r--r--  changelog.d/14676.misc                                                         |   1
-rw-r--r--  changelog.d/14680.misc                                                         |   1
-rw-r--r--  changelog.d/14681.misc                                                         |   1
-rw-r--r--  changelog.d/14685.misc                                                         |   1
-rw-r--r--  changelog.d/14693.misc                                                         |   1
-rw-r--r--  changelog.d/14694.misc                                                         |   1
-rw-r--r--  changelog.d/14695.misc                                                         |   1
-rw-r--r--  changelog.d/14696.misc                                                         |   1
-rw-r--r--  changelog.d/14697.misc                                                         |   1
-rw-r--r--  changelog.d/14698.misc                                                         |   1
-rw-r--r--  changelog.d/14700.misc                                                         |   1
-rw-r--r--  changelog.d/14701.misc                                                         |   1
-rw-r--r--  changelog.d/14702.misc                                                         |   1
-rw-r--r--  changelog.d/14707.misc                                                         |   1
-rw-r--r--  debian/changelog                                                               |   6
-rw-r--r--  docker/Dockerfile-dhvirtualenv                                                 |   4
-rw-r--r--  docker/complement/conf/workers-shared-extra.yaml.j2                            |   2
-rw-r--r--  docs/admin_api/account_validity.md                                             |   2
-rw-r--r--  docs/admin_api/event_reports.md                                                |   2
-rw-r--r--  docs/admin_api/media_admin_api.md                                              |   2
-rw-r--r--  docs/admin_api/purge_history_api.md                                            |   2
-rw-r--r--  docs/admin_api/room_membership.md                                              |   2
-rw-r--r--  docs/admin_api/rooms.md                                                        |   4
-rw-r--r--  docs/admin_api/statistics.md                                                   |   2
-rw-r--r--  docs/admin_api/user_admin_api.md                                               |   2
-rw-r--r--  docs/code_style.md                                                             |   4
-rw-r--r--  docs/development/contributing_guide.md                                         |  11
-rw-r--r--  docs/modules/writing_a_module.md                                               |   6
-rw-r--r--  docs/openid.md                                                                 | 602
-rw-r--r--  docs/postgres.md                                                               |   2
-rw-r--r--  docs/reverse_proxy.md                                                          |   2
-rw-r--r--  docs/setup/installation.md                                                     |   2
-rw-r--r--  docs/sso_mapping_providers.md                                                  |   2
-rw-r--r--  docs/upgrade.md                                                                |   8
-rw-r--r--  docs/usage/administration/admin_api/README.md                                  |   6
-rw-r--r--  docs/usage/administration/admin_api/federation.md                              |   4
-rw-r--r--  docs/usage/administration/admin_api/registration_tokens.md                    |   2
-rw-r--r--  docs/usage/administration/admin_faq.md                                         |   4
-rw-r--r--  docs/usage/administration/monitoring/reporting_homeserver_usage_statistics.md |   2
-rw-r--r--  docs/usage/administration/request_log.md                                       |   2
-rw-r--r--  docs/usage/configuration/config_documentation.md                               | 300
-rw-r--r--  docs/workers.md                                                                |  45
-rw-r--r--  mypy.ini                                                                       |   6
-rw-r--r--  poetry.lock                                                                    | 592
-rw-r--r--  pyproject.toml                                                                 |  50
-rwxr-xr-x  scripts-dev/complement.sh                                                      |   2
-rwxr-xr-x  scripts-dev/lint.sh                                                            |   6
-rw-r--r--  stubs/frozendict.pyi                                                           |   2
-rw-r--r--  stubs/icu.pyi                                                                  |   2
-rw-r--r--  stubs/sortedcontainers/sorteddict.pyi                                          |   2
-rw-r--r--  stubs/sortedcontainers/sortedlist.pyi                                          |   2
-rw-r--r--  stubs/sortedcontainers/sortedset.pyi                                           |   2
-rwxr-xr-x  synapse/_scripts/synapse_port_db.py                                            |   2
-rw-r--r--  synapse/api/filtering.py                                                       |  13
-rw-r--r--  synapse/config/_base.pyi                                                       |   2
-rw-r--r--  synapse/config/experimental.py                                                 |   3
-rw-r--r--  synapse/config/oidc.py                                                         |   6
-rw-r--r--  synapse/handlers/account_data.py                                               | 111
-rw-r--r--  synapse/handlers/device.py                                                     |   9
-rw-r--r--  synapse/handlers/oidc.py                                                       |  85
-rw-r--r--  synapse/handlers/search.py                                                     |   2
-rw-r--r--  synapse/handlers/sync.py                                                       |  34
-rw-r--r--  synapse/module_api/__init__.py                                                 |  40
-rw-r--r--  synapse/push/clientformat.py                                                   |   5
-rw-r--r--  synapse/replication/http/account_data.py                                       |  92
-rw-r--r--  synapse/replication/tcp/client.py                                              |   3
-rw-r--r--  synapse/rest/client/account.py                                                 |   5
-rw-r--r--  synapse/rest/client/account_data.py                                            | 115
-rw-r--r--  synapse/rest/media/v1/oembed.py                                                |  15
-rw-r--r--  synapse/storage/_base.py                                                       |  17
-rw-r--r--  synapse/storage/database.py                                                    |  33
-rw-r--r--  synapse/storage/databases/main/account_data.py                                 | 233
-rw-r--r--  synapse/storage/databases/main/cache.py                                        |  11
-rw-r--r--  synapse/storage/databases/main/deviceinbox.py                                  |   7
-rw-r--r--  synapse/storage/databases/main/devices.py                                      |  11
-rw-r--r--  synapse/storage/databases/main/events_worker.py                                |  15
-rw-r--r--  synapse/storage/databases/main/presence.py                                     |   8
-rw-r--r--  synapse/storage/databases/main/push_rule.py                                    |   7
-rw-r--r--  synapse/storage/databases/main/pusher.py                                       |   6
-rw-r--r--  synapse/storage/databases/main/receipts.py                                     |   7
-rw-r--r--  synapse/storage/databases/main/tags.py                                         |   8
-rw-r--r--  synapse/util/macaroons.py                                                      |   7
-rw-r--r--  tests/crypto/test_event_signing.py                                             |   6
-rw-r--r--  tests/crypto/test_keyring.py                                                   | 122
-rw-r--r--  tests/handlers/test_oidc.py                                                    | 152
-rw-r--r--  tests/metrics/test_metrics.py                                                  |  24
-rw-r--r--  tests/rest/client/test_account.py                                              |  30
-rw-r--r--  tests/rest/media/v1/test_oembed.py                                             |  10
-rw-r--r--  tests/util/test_macaroons.py                                                   |   1
109 files changed, 2135 insertions, 1010 deletions
diff --git a/.flake8 b/.flake8
deleted file mode 100644

index 4c6a4d5843..0000000000
--- a/.flake8
+++ /dev/null
@@ -1,18 +0,0 @@
-# TODO: incorporate this into pyproject.toml if flake8 supports it in the future.
-# See https://github.com/PyCQA/flake8/issues/234
-[flake8]
-# see https://pycodestyle.readthedocs.io/en/latest/intro.html#error-codes
-# for error codes. The ones we ignore are:
-#  W503: line break before binary operator
-#  W504: line break after binary operator
-#  E203: whitespace before ':' (which is contrary to pep8?)
-#  E731: do not assign a lambda expression, use a def
-#  E501: Line too long (black enforces this for us)
-#
-# flake8-bugbear runs extra checks. Its error codes are described at
-# https://github.com/PyCQA/flake8-bugbear#list-of-warnings
-#  B019: Use of functools.lru_cache or functools.cache on methods can lead to memory leaks
-#  B023: Functions defined inside a loop must not use variables redefined in the loop
-#  B024: Abstract base class with no abstract method.
-
-ignore=W503,W504,E203,E731,E501,B019,B023,B024
diff --git a/.github/workflows/dependabot_changelog.yml b/.github/workflows/dependabot_changelog.yml
index b6a29a5722..df47e3dcba 100644
--- a/.github/workflows/dependabot_changelog.yml
+++ b/.github/workflows/dependabot_changelog.yml
@@ -6,7 +6,7 @@ on:
       - reopened # For debugging!
 
 permissions:
-  # Needed to be able to push the commit. See
+  # Needed to be able to push the commit. See
   # https://docs.github.com/en/code-security/dependabot/working-with-dependabot/automating-dependabot-with-github-actions#enable-auto-merge-on-a-pull-request
   # for a similar example
   contents: write
@@ -20,8 +20,11 @@ jobs:
         with:
           ref: ${{ github.event.pull_request.head.ref }}
       - name: Write, commit and push changelog
+        env:
+          PR_TITLE: ${{ github.event.pull_request.title }}
+          PR_NUMBER: ${{ github.event.pull_request.number }}
         run: |
-          echo "${{ github.event.pull_request.title }}." > "changelog.d/${{ github.event.pull_request.number }}".misc
+          echo "${PR_TITLE}." > "changelog.d/${PR_NUMBER}".misc
           git add changelog.d
           git config user.email "github-actions[bot]@users.noreply.github.com"
          git config user.name "GitHub Actions"
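The hunk above applies GitHub's script-injection hardening advice: the pull request title and number are attacker-influenced inputs, so they are handed to the shell through `env:` rather than interpolated into the `run:` script with `${{ ... }}`. A minimal standalone sketch of the same pattern (a hypothetical workflow, not part of this commit):

```yaml
# Hypothetical workflow showing the env-var pattern used in the hunk above.
# Untrusted event data is exposed as environment variables; the shell expands
# them at run time, so they can never be parsed as workflow-script code.
on: pull_request

jobs:
  changelog:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
      - name: Write changelog entry
        env:
          PR_TITLE: ${{ github.event.pull_request.title }}
          PR_NUMBER: ${{ github.event.pull_request.number }}
        run: |
          echo "${PR_TITLE}." > "changelog.d/${PR_NUMBER}.misc"
```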
diff --git a/.github/workflows/docs-pr-netlify.yaml b/.github/workflows/docs-pr-netlify.yaml
index 231982f681..ef7a38144e 100644
--- a/.github/workflows/docs-pr-netlify.yaml
+++ b/.github/workflows/docs-pr-netlify.yaml
@@ -14,7 +14,7 @@ jobs:
       # There's a 'download artifact' action, but it hasn't been updated for the workflow_run action
       # (https://github.com/actions/download-artifact/issues/60) so instead we get this mess:
       - name: 📥 Download artifact
-        uses: dawidd6/action-download-artifact@e6e25ac3a2b93187502a8be1ef9e9603afc34925 # v2.24.2
+        uses: dawidd6/action-download-artifact@bd10f381a96414ce2b13a11bfa89902ba7cea07f # v2.24.3
         with:
           workflow: docs-pr.yaml
           run_id: ${{ github.event.workflow_run.id }}
diff --git a/.github/workflows/docs-pr.yaml b/.github/workflows/docs-pr.yaml
index cde6cf511e..d41f6c4490 100644
--- a/.github/workflows/docs-pr.yaml
+++ b/.github/workflows/docs-pr.yaml
@@ -4,6 +4,8 @@ on:
   pull_request:
     paths:
       - docs/**
+      - book.toml
+      - .github/workflows/docs-pr.yaml
 
 jobs:
   pages:
@@ -32,3 +34,27 @@ jobs:
         with:
          path: book # We'll only use this in a workflow_run, then we're done with it
          retention-days: 1
+
+  link-check:
+    name: Check links in documentation
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v2
+
+      - name: Setup mdbook
+        uses: peaceiris/actions-mdbook@adeb05db28a0c0004681db83893d56c0388ea9ea # v1.2.0
+        with:
+          mdbook-version: '0.4.17'
+
+      - name: Setup htmltest
+        run: |
+          wget https://github.com/wjdp/htmltest/releases/download/v0.17.0/htmltest_0.17.0_linux_amd64.tar.gz
+          echo '775c597ee74899d6002cd2d93076f897f4ba68686bceabe2e5d72e84c57bc0fb  htmltest_0.17.0_linux_amd64.tar.gz' | sha256sum -c
+          tar zxf htmltest_0.17.0_linux_amd64.tar.gz
+
+      - name: Test links with htmltest
+        # Build the book with `./` as the site URL (to make checks on 404.html possible)
+        # Then run htmltest (without checking external links since that involves the network and is slow).
+        run: |
+          MDBOOK_OUTPUT__HTML__SITE_URL="./" mdbook build
+          ./htmltest book --skip-external
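The new `link-check` job above drives htmltest entirely from command-line flags. The same settings can also live in htmltest's config file — a sketch, assuming htmltest's documented `DirectoryPath` and `CheckExternal` options:

```yaml
# .htmltest.yml — assumed equivalent of `./htmltest book --skip-external`
DirectoryPath: book    # the built mdbook output directory
CheckExternal: false   # skip network-dependent external link checks
```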
diff --git a/.github/workflows/docs.yaml b/.github/workflows/docs.yaml
index 575412d965..0b33058337 100644
--- a/.github/workflows/docs.yaml
+++ b/.github/workflows/docs.yaml
@@ -58,7 +58,7 @@ jobs:
 
       # Deploy to the target directory.
       - name: Deploy to gh pages
-        uses: peaceiris/actions-gh-pages@de7ea6f8efb354206b205ef54722213d99067935 # v3.9.0
+        uses: peaceiris/actions-gh-pages@64b46b4226a4a12da2239ba3ea5aa73e3163c75b # v3.9.1
         with:
           github_token: ${{ secrets.GITHUB_TOKEN }}
           publish_dir: ./book
diff --git a/.github/workflows/latest_deps.yml b/.github/workflows/latest_deps.yml
index e5e4e8da77..5ab9a8af34 100644
--- a/.github/workflows/latest_deps.yml
+++ b/.github/workflows/latest_deps.yml
@@ -208,7 +208,7 @@ jobs:
     steps:
       - uses: actions/checkout@v3
 
-      - uses: JasonEtco/create-an-issue@3a8ba796516b57db8cb2ee6dfc65bc76cd39d56d # v2.8.2
+      - uses: JasonEtco/create-an-issue@e27dddc79c92bc6e4562f268fffa5ed752639abd # v2.9.1
         env:
           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
         with:
diff --git a/.github/workflows/release-artifacts.yml b/.github/workflows/release-artifacts.yml
index 0601a7dbaf..30ac4c1571 100644
--- a/.github/workflows/release-artifacts.yml
+++ b/.github/workflows/release-artifacts.yml
@@ -148,7 +148,7 @@ jobs:
         env:
           # Skip testing for platforms which various libraries don't have wheels
           # for, and so need extra build deps.
-          CIBW_TEST_SKIP: pp39-* *i686* *musl* pp37-macosx*
+          CIBW_TEST_SKIP: pp3{7,9}-* *i686* *musl*
           # Fix Rust OOM errors on emulated aarch64: https://github.com/rust-lang/cargo/issues/10583
           CARGO_NET_GIT_FETCH_WITH_CLI: true
           CIBW_ENVIRONMENT_PASS_LINUX: CARGO_NET_GIT_FETCH_WITH_CLI
diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml
index f07655d982..5a0c0a0d65 100644
--- a/.github/workflows/tests.yml
+++ b/.github/workflows/tests.yml
@@ -53,7 +53,7 @@ jobs:
       - run: scripts-dev/check_schema_delta.py --force-colors
 
   lint:
-    uses: "matrix-org/backend-meta/.github/workflows/python-poetry-ci.yml@v1"
+    uses: "matrix-org/backend-meta/.github/workflows/python-poetry-ci.yml@v2"
     with:
       typechecking-extras: "all"
 
diff --git a/.github/workflows/twisted_trunk.yml b/.github/workflows/twisted_trunk.yml
index b08222f289..0a88f0cd7b 100644
--- a/.github/workflows/twisted_trunk.yml
+++ b/.github/workflows/twisted_trunk.yml
@@ -174,7 +174,7 @@ jobs:
     steps:
       - uses: actions/checkout@v3
 
-      - uses: JasonEtco/create-an-issue@3a8ba796516b57db8cb2ee6dfc65bc76cd39d56d # v2.8.2
+      - uses: JasonEtco/create-an-issue@e27dddc79c92bc6e4562f268fffa5ed752639abd # v2.9.1
         env:
           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
         with:
diff --git a/CHANGES.md b/CHANGES.md
index 0b9ea88e84..31e476aa4a 100644
--- a/CHANGES.md
+++ b/CHANGES.md
@@ -1,3 +1,97 @@
+Synapse 1.75.0rc1 (2023-01-10)
+==============================
+
+Features
+--------
+
+- Add a `cached` function to `synapse.module_api` that returns a decorator to cache return values of functions. ([\#14663](https://github.com/matrix-org/synapse/issues/14663))
+- Add experimental support for [MSC3391](https://github.com/matrix-org/matrix-spec-proposals/pull/3391) (removing account data). ([\#14714](https://github.com/matrix-org/synapse/issues/14714))
+- Support [RFC7636](https://datatracker.ietf.org/doc/html/rfc7636) Proof Key for Code Exchange for OAuth single sign-on. ([\#14750](https://github.com/matrix-org/synapse/issues/14750))
+- Support non-OpenID compliant userinfo claims for subject and picture. ([\#14753](https://github.com/matrix-org/synapse/issues/14753))
+- Improve performance of `/sync` when filtering all rooms, message types, or senders. ([\#14786](https://github.com/matrix-org/synapse/issues/14786))
+- Improve performance of the `/hierarchy` endpoint. ([\#14263](https://github.com/matrix-org/synapse/issues/14263))
+
+
+Bugfixes
+--------
+
+- Fix the *MAU Limits* section of the Grafana dashboard relying on a specific `job` name for the workers of a Synapse deployment. ([\#14644](https://github.com/matrix-org/synapse/issues/14644))
+- Fix a bug introduced in Synapse 1.70.0 which could cause spurious `UNIQUE constraint failed` errors in the `rotate_notifs` background job. ([\#14669](https://github.com/matrix-org/synapse/issues/14669))
+- Ensure stream IDs are always updated after caches get invalidated with workers. Contributed by Nick @ Beeper (@fizzadar). ([\#14723](https://github.com/matrix-org/synapse/issues/14723))
+- Remove the unspecced `device` field from `/pushrules` responses. ([\#14727](https://github.com/matrix-org/synapse/issues/14727))
+- Fix a bug introduced in Synapse 1.73.0 where the `picture_claim` configured under `oidc_providers` was unused (the default value of `"picture"` was used instead). ([\#14751](https://github.com/matrix-org/synapse/issues/14751))
+- Unescape HTML entities in URL preview titles making use of oEmbed responses. ([\#14781](https://github.com/matrix-org/synapse/issues/14781))
+- Disable sending confirmation email when 3pid is disabled. ([\#14725](https://github.com/matrix-org/synapse/issues/14725))
+
+
+Improved Documentation
+----------------------
+
+- Declare support for Python 3.11. ([\#14673](https://github.com/matrix-org/synapse/issues/14673))
+- Fix `target_memory_usage` being used in the description for the actual `cache_autotune` sub-option `target_cache_memory_usage`. ([\#14674](https://github.com/matrix-org/synapse/issues/14674))
+- Move `email` to Server section in config file documentation. ([\#14730](https://github.com/matrix-org/synapse/issues/14730))
+- Fix broken links in the Synapse documentation. ([\#14744](https://github.com/matrix-org/synapse/issues/14744))
+- Add missing worker settings to shared configuration documentation. ([\#14748](https://github.com/matrix-org/synapse/issues/14748))
+- Document using Twitter as a OAuth 2.0 authentication provider. ([\#14778](https://github.com/matrix-org/synapse/issues/14778))
+- Fix Synapse 1.74 upgrade notes to correctly explain how to install pyICU when installing Synapse from PyPI. ([\#14797](https://github.com/matrix-org/synapse/issues/14797))
+- Update link to towncrier in contribution guide. ([\#14801](https://github.com/matrix-org/synapse/issues/14801))
+- Use `htmltest` to check links in the Synapse documentation. ([\#14743](https://github.com/matrix-org/synapse/issues/14743))
+
+
+Internal Changes
+----------------
+
+- Faster remote room joins: stream the un-partial-stating of events over replication. ([\#14545](https://github.com/matrix-org/synapse/issues/14545), [\#14546](https://github.com/matrix-org/synapse/issues/14546))
+- Use [ruff](https://github.com/charliermarsh/ruff/) instead of flake8. ([\#14633](https://github.com/matrix-org/synapse/issues/14633), [\#14741](https://github.com/matrix-org/synapse/issues/14741))
+- Change `handle_new_client_event` signature so that a 429 does not reach clients on `PartialStateConflictError`, and internally retry when needed instead. ([\#14665](https://github.com/matrix-org/synapse/issues/14665))
+- Remove dependency on jQuery on reCAPTCHA page. ([\#14672](https://github.com/matrix-org/synapse/issues/14672))
+- Faster joins: make `compute_state_after_events` consistent with other state-fetching functions that take a `StateFilter`. ([\#14676](https://github.com/matrix-org/synapse/issues/14676))
+- Add missing type hints. ([\#14680](https://github.com/matrix-org/synapse/issues/14680), [\#14681](https://github.com/matrix-org/synapse/issues/14681), [\#14687](https://github.com/matrix-org/synapse/issues/14687))
+- Improve type annotations for the helper methods on a `CachedFunction`. ([\#14685](https://github.com/matrix-org/synapse/issues/14685))
+- Check that the SQLite database file exists before porting to PostgreSQL. ([\#14692](https://github.com/matrix-org/synapse/issues/14692))
+- Add `.direnv/` directory to .gitignore to prevent local state generated by the [direnv](https://direnv.net/) development tool from being committed. ([\#14707](https://github.com/matrix-org/synapse/issues/14707))
+- Batch up replication requests to request the resyncing of remote users's devices. ([\#14716](https://github.com/matrix-org/synapse/issues/14716))
+- If debug logging is enabled, log the `msgid`s of any to-device messages that are returned over `/sync`. ([\#14724](https://github.com/matrix-org/synapse/issues/14724))
+- Change GHA CI job to follow best practices. ([\#14772](https://github.com/matrix-org/synapse/issues/14772))
+- Switch to our fork of `dh-virtualenv` to work around an upstream Python 3.11 incompatibility. ([\#14774](https://github.com/matrix-org/synapse/issues/14774))
+- Skip testing built wheels for PyPy 3.7 on Linux x86_64 as we lack new required dependencies in the build environment. ([\#14802](https://github.com/matrix-org/synapse/issues/14802))
+
+### Dependabot updates
+
+<details>
+
+- Bump JasonEtco/create-an-issue from 2.8.1 to 2.8.2. ([\#14693](https://github.com/matrix-org/synapse/issues/14693))
+- Bump anyhow from 1.0.66 to 1.0.68. ([\#14694](https://github.com/matrix-org/synapse/issues/14694))
+- Bump blake2 from 0.10.5 to 0.10.6. ([\#14695](https://github.com/matrix-org/synapse/issues/14695))
+- Bump serde_json from 1.0.89 to 1.0.91. ([\#14696](https://github.com/matrix-org/synapse/issues/14696))
+- Bump serde from 1.0.150 to 1.0.151. ([\#14697](https://github.com/matrix-org/synapse/issues/14697))
+- Bump lxml from 4.9.1 to 4.9.2. ([\#14698](https://github.com/matrix-org/synapse/issues/14698))
+- Bump types-jsonschema from 4.17.0.1 to 4.17.0.2. ([\#14700](https://github.com/matrix-org/synapse/issues/14700))
+- Bump sentry-sdk from 1.11.1 to 1.12.0. ([\#14701](https://github.com/matrix-org/synapse/issues/14701))
+- Bump types-setuptools from 65.6.0.1 to 65.6.0.2. ([\#14702](https://github.com/matrix-org/synapse/issues/14702))
+- Bump minimum PyYAML to 3.13. ([\#14720](https://github.com/matrix-org/synapse/issues/14720))
+- Bump JasonEtco/create-an-issue from 2.8.2 to 2.9.1. ([\#14731](https://github.com/matrix-org/synapse/issues/14731))
+- Bump towncrier from 22.8.0 to 22.12.0. ([\#14732](https://github.com/matrix-org/synapse/issues/14732))
+- Bump isort from 5.10.1 to 5.11.4. ([\#14733](https://github.com/matrix-org/synapse/issues/14733))
+- Bump attrs from 22.1.0 to 22.2.0. ([\#14734](https://github.com/matrix-org/synapse/issues/14734))
+- Bump black from 22.10.0 to 22.12.0. ([\#14735](https://github.com/matrix-org/synapse/issues/14735))
+- Bump sentry-sdk from 1.12.0 to 1.12.1. ([\#14736](https://github.com/matrix-org/synapse/issues/14736))
+- Bump setuptools from 65.3.0 to 65.5.1. ([\#14738](https://github.com/matrix-org/synapse/issues/14738))
+- Bump serde from 1.0.151 to 1.0.152. ([\#14758](https://github.com/matrix-org/synapse/issues/14758))
+- Bump ruff from 0.0.189 to 0.0.206. ([\#14759](https://github.com/matrix-org/synapse/issues/14759))
+- Bump pydantic from 1.10.2 to 1.10.4. ([\#14760](https://github.com/matrix-org/synapse/issues/14760))
+- Bump gitpython from 3.1.29 to 3.1.30. ([\#14761](https://github.com/matrix-org/synapse/issues/14761))
+- Bump pillow from 9.3.0 to 9.4.0. ([\#14762](https://github.com/matrix-org/synapse/issues/14762))
+- Bump types-requests from 2.28.11.5 to 2.28.11.7. ([\#14763](https://github.com/matrix-org/synapse/issues/14763))
+- Bump dawidd6/action-download-artifact from 2.24.2 to 2.24.3. ([\#14779](https://github.com/matrix-org/synapse/issues/14779))
+- Bump peaceiris/actions-gh-pages from 3.9.0 to 3.9.1. ([\#14791](https://github.com/matrix-org/synapse/issues/14791))
+- Bump types-pillow from 9.3.0.4 to 9.4.0.0. ([\#14792](https://github.com/matrix-org/synapse/issues/14792))
+- Bump pyopenssl from 22.1.0 to 23.0.0. ([\#14793](https://github.com/matrix-org/synapse/issues/14793))
+- Bump types-setuptools from 65.6.0.2 to 65.6.0.3. ([\#14794](https://github.com/matrix-org/synapse/issues/14794))
+- Bump importlib-metadata from 4.2.0 to 6.0.0. ([\#14795](https://github.com/matrix-org/synapse/issues/14795))
+- Bump ruff from 0.0.206 to 0.0.215. ([\#14796](https://github.com/matrix-org/synapse/issues/14796))
+</details>
+
 Synapse 1.74.0 (2022-12-20)
 ===========================
 
diff --git a/Cargo.lock b/Cargo.lock
index c249ec56f7..ace6a8c50a 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -323,18 +323,18 @@ checksum = "d29ab0c6d3fc0ee92fe66e2d99f700eab17a8d57d1c1d3b748380fb20baa78cd"
 
 [[package]]
 name = "serde"
-version = "1.0.151"
+version = "1.0.152"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "97fed41fc1a24994d044e6db6935e69511a1153b52c15eb42493b26fa87feba0"
+checksum = "bb7d1f0d3021d347a83e556fc4683dea2ea09d87bccdf88ff5c12545d89d5efb"
 dependencies = [
  "serde_derive",
 ]
 
 [[package]]
 name = "serde_derive"
-version = "1.0.151"
+version = "1.0.152"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "255abe9a125a985c05190d687b320c12f9b1f0b99445e608c21ba0782c719ad8"
+checksum = "af487d118eecd09402d70a5d72551860e788df87b464af30e5ea6a38c75c541e"
 dependencies = [
  "proc-macro2",
  "quote",
diff --git a/changelog.d/14263.misc b/changelog.d/14263.misc
deleted file mode 100644
index 11d9446a4b..0000000000
--- a/changelog.d/14263.misc
+++ /dev/null
@@ -1 +0,0 @@
-Improve performance of the `/hierarchy` endpoint.
diff --git a/changelog.d/14545.misc b/changelog.d/14545.misc
deleted file mode 100644
index 60b6761a51..0000000000
--- a/changelog.d/14545.misc
+++ /dev/null
@@ -1 +0,0 @@
-Faster remote room joins: stream the un-partial-stating of events over replication.
\ No newline at end of file
diff --git a/changelog.d/14546.misc b/changelog.d/14546.misc
deleted file mode 100644
index 60b6761a51..0000000000
--- a/changelog.d/14546.misc
+++ /dev/null
@@ -1 +0,0 @@
-Faster remote room joins: stream the un-partial-stating of events over replication.
\ No newline at end of file
diff --git a/changelog.d/14644.bugfix b/changelog.d/14644.bugfix
deleted file mode 100644
index 711088bb7e..0000000000
--- a/changelog.d/14644.bugfix
+++ /dev/null
@@ -1 +0,0 @@
-Fix the *MAU Limits* section of the Grafana dashboard relying on a specific `job` name for the workers of a Synapse deployment.
\ No newline at end of file
diff --git a/changelog.d/14665.misc b/changelog.d/14665.misc
deleted file mode 100644
index 2b7c96143d..0000000000
--- a/changelog.d/14665.misc
+++ /dev/null
@@ -1 +0,0 @@
-Change `handle_new_client_event` signature so that a 429 does not reach clients on `PartialStateConflictError`, and internally retry when needed instead.
diff --git a/changelog.d/14669.bugfix b/changelog.d/14669.bugfix
deleted file mode 100644
index bea316b065..0000000000
--- a/changelog.d/14669.bugfix
+++ /dev/null
@@ -1 +0,0 @@
-Fix a bug introduced in Synapse 1.70.0 which could cause spurious `UNIQUE constraint failed` errors in the `rotate_notifs` background job.
diff --git a/changelog.d/14672.misc b/changelog.d/14672.misc
deleted file mode 100644
index b94ebed971..0000000000
--- a/changelog.d/14672.misc
+++ /dev/null
@@ -1 +0,0 @@
-Remove dependency on jQuery on reCAPTCHA page.
diff --git a/changelog.d/14673.doc b/changelog.d/14673.doc
deleted file mode 100644
index 7baf5f7f38..0000000000
--- a/changelog.d/14673.doc
+++ /dev/null
@@ -1 +0,0 @@
-Declare support for Python 3.11.
diff --git a/changelog.d/14674.doc b/changelog.d/14674.doc
deleted file mode 100644
index df21417819..0000000000
--- a/changelog.d/14674.doc
+++ /dev/null
@@ -1 +0,0 @@
-Fix `target_memory_usage` being used in the description for the actual `cache_autotune` sub-option `target_cache_memory_usage`.
diff --git a/changelog.d/14676.misc b/changelog.d/14676.misc
deleted file mode 100644
index 8a41df9c64..0000000000
--- a/changelog.d/14676.misc
+++ /dev/null
@@ -1 +0,0 @@
-Faster joins: make `computer_state_after_events` consistent with other state-fetching functions that take a `StateFilter`.
diff --git a/changelog.d/14680.misc b/changelog.d/14680.misc
deleted file mode 100644
index d44571b731..0000000000
--- a/changelog.d/14680.misc
+++ /dev/null
@@ -1 +0,0 @@
-Add missing type hints.
diff --git a/changelog.d/14681.misc b/changelog.d/14681.misc
deleted file mode 100644
index d44571b731..0000000000
--- a/changelog.d/14681.misc
+++ /dev/null
@@ -1 +0,0 @@
-Add missing type hints.
diff --git a/changelog.d/14685.misc b/changelog.d/14685.misc
deleted file mode 100644
index 3ba2270100..0000000000
--- a/changelog.d/14685.misc
+++ /dev/null
@@ -1 +0,0 @@
-Improve type annotations for the helper methods on a `CachedFunction`.
\ No newline at end of file
diff --git a/changelog.d/14693.misc b/changelog.d/14693.misc
deleted file mode 100644
index 86771f41b2..0000000000
--- a/changelog.d/14693.misc
+++ /dev/null
@@ -1 +0,0 @@
-Bump JasonEtco/create-an-issue from 2.8.1 to 2.8.2.
diff --git a/changelog.d/14694.misc b/changelog.d/14694.misc
deleted file mode 100644
index 146238d8c5..0000000000
--- a/changelog.d/14694.misc
+++ /dev/null
@@ -1 +0,0 @@
-Bump anyhow from 1.0.66 to 1.0.68.
diff --git a/changelog.d/14695.misc b/changelog.d/14695.misc
deleted file mode 100644
index 57e08498be..0000000000
--- a/changelog.d/14695.misc
+++ /dev/null
@@ -1 +0,0 @@
-Bump blake2 from 0.10.5 to 0.10.6.
diff --git a/changelog.d/14696.misc b/changelog.d/14696.misc
deleted file mode 100644
index 9849366b9f..0000000000
--- a/changelog.d/14696.misc
+++ /dev/null
@@ -1 +0,0 @@
-Bump serde_json from 1.0.89 to 1.0.91.
diff --git a/changelog.d/14697.misc b/changelog.d/14697.misc
deleted file mode 100644
index 514209fcc3..0000000000
--- a/changelog.d/14697.misc
+++ /dev/null
@@ -1 +0,0 @@
-Bump serde from 1.0.150 to 1.0.151.
diff --git a/changelog.d/14698.misc b/changelog.d/14698.misc
deleted file mode 100644
index 2e2072183e..0000000000
--- a/changelog.d/14698.misc
+++ /dev/null
@@ -1 +0,0 @@
-Bump lxml from 4.9.1 to 4.9.2.
diff --git a/changelog.d/14700.misc b/changelog.d/14700.misc
deleted file mode 100644
index 253eb1721d..0000000000
--- a/changelog.d/14700.misc
+++ /dev/null
@@ -1 +0,0 @@
-Bump types-jsonschema from 4.17.0.1 to 4.17.0.2.
diff --git a/changelog.d/14701.misc b/changelog.d/14701.misc
deleted file mode 100644
index 05c89d5948..0000000000
--- a/changelog.d/14701.misc
+++ /dev/null
@@ -1 +0,0 @@
-Bump sentry-sdk from 1.11.1 to 1.12.0.
diff --git a/changelog.d/14702.misc b/changelog.d/14702.misc
deleted file mode 100644
index 17c0485f12..0000000000
--- a/changelog.d/14702.misc
+++ /dev/null
@@ -1 +0,0 @@
-Bump types-setuptools from 65.6.0.1 to 65.6.0.2.
diff --git a/changelog.d/14707.misc b/changelog.d/14707.misc
deleted file mode 100644
index 38f47a6f30..0000000000
--- a/changelog.d/14707.misc
+++ /dev/null
@@ -1 +0,0 @@
-Add `.direnv/` directory to .gitignore to prevent local state generated by the [direnv](https://direnv.net/) development tool from being committed.
\ No newline at end of file
diff --git a/debian/changelog b/debian/changelog
index f6edb4d860..e02793c996 100644
--- a/debian/changelog
+++ b/debian/changelog
@@ -1,3 +1,9 @@
+matrix-synapse-py3 (1.75.0~rc1) stable; urgency=medium
+
+  * New Synapse release 1.75.0rc1.
+
+ -- Synapse Packaging team <packages@matrix.org>  Tue, 10 Jan 2023 12:18:27 +0000
+
 matrix-synapse-py3 (1.74.0) stable; urgency=medium
 
   * New Synapse release 1.74.0.
diff --git a/docker/Dockerfile-dhvirtualenv b/docker/Dockerfile-dhvirtualenv
index f3b5b00ce6..2013732422 100644
--- a/docker/Dockerfile-dhvirtualenv
+++ b/docker/Dockerfile-dhvirtualenv
@@ -36,8 +36,10 @@ RUN env DEBIAN_FRONTEND=noninteractive apt-get install \
     wget
 
 # fetch and unpack the package
+# We are temporarily using a fork of dh-virtualenv due to an incompatibility with Python 3.11, which ships with
+# Debian sid. TODO: Switch back to upstream once https://github.com/spotify/dh-virtualenv/pull/354 has merged.
 RUN mkdir /dh-virtualenv
-RUN wget -q -O /dh-virtualenv.tar.gz https://github.com/spotify/dh-virtualenv/archive/refs/tags/1.2.2.tar.gz
+RUN wget -q -O /dh-virtualenv.tar.gz https://github.com/matrix-org/dh-virtualenv/archive/refs/tags/matrixorg-2023010302.tar.gz
 RUN tar -xv --strip-components=1 -C /dh-virtualenv -f /dh-virtualenv.tar.gz
 
 # install its build deps. We do another apt-cache-update here, because we might
diff --git a/docker/complement/conf/workers-shared-extra.yaml.j2 b/docker/complement/conf/workers-shared-extra.yaml.j2
index ca640c343b..cb839fed07 100644
--- a/docker/complement/conf/workers-shared-extra.yaml.j2
+++ b/docker/complement/conf/workers-shared-extra.yaml.j2
@@ -102,6 +102,8 @@ experimental_features:
   {% endif %}
   # Filtering /messages by relation type.
   msc3874_enabled: true
+  # Enable removing account data support
+  msc3391_enabled: true
 
 server_notices:
   system_mxid_localpart: _server
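The hunk above opts the Complement worker test configuration into [MSC3391](https://github.com/matrix-org/matrix-spec-proposals/pull/3391) (account data deletion), which this release adds experimental support for. On an ordinary deployment the same flag would go under `experimental_features` in `homeserver.yaml` — a minimal sketch:

```yaml
# homeserver.yaml — opt in to experimental MSC3391 (account data removal) support
experimental_features:
  msc3391_enabled: true
```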
diff --git a/docs/admin_api/account_validity.md b/docs/admin_api/account_validity.md
index d878bf7451..87d8f7150e 100644
--- a/docs/admin_api/account_validity.md
+++ b/docs/admin_api/account_validity.md
@@ -5,7 +5,7 @@ use it, you must enable the account validity feature (under
 `account_validity`) in Synapse's configuration.
 
 To use it, you will need to authenticate by providing an `access_token`
-for a server admin: see [Admin API](../usage/administration/admin_api).
+for a server admin: see [Admin API](../usage/administration/admin_api/).
 
 ## Renew account
diff --git a/docs/admin_api/event_reports.md b/docs/admin_api/event_reports.md
index be6f0961bf..beec8bb7ef 100644
--- a/docs/admin_api/event_reports.md
+++ b/docs/admin_api/event_reports.md
@@ -3,7 +3,7 @@
 This API returns information about reported events.
 
 To use it, you will need to authenticate by providing an `access_token`
-for a server admin: see [Admin API](../usage/administration/admin_api).
+for a server admin: see [Admin API](../usage/administration/admin_api/).
 
 The api is:
 ```
diff --git a/docs/admin_api/media_admin_api.md b/docs/admin_api/media_admin_api.md
index d57c5aedae..7f8c8e22c1 100644
--- a/docs/admin_api/media_admin_api.md
+++ b/docs/admin_api/media_admin_api.md
@@ -6,7 +6,7 @@ Details about the format of the `media_id` and storage of the media in the file
 are documented under [media repository](../media_repository.md).
 
 To use it, you will need to authenticate by providing an `access_token`
-for a server admin: see [Admin API](../usage/administration/admin_api).
+for a server admin: see [Admin API](../usage/administration/admin_api/).
 
 ## List all media in a room
diff --git a/docs/admin_api/purge_history_api.md b/docs/admin_api/purge_history_api.md
index 2527e2758b..ba6d08aa4d 100644
--- a/docs/admin_api/purge_history_api.md
+++ b/docs/admin_api/purge_history_api.md
@@ -11,7 +11,7 @@ Note that Synapse requires at least one message in each room, so it will never
 delete the last message in a room.
 
 To use it, you will need to authenticate by providing an `access_token`
-for a server admin: see [Admin API](../usage/administration/admin_api).
+for a server admin: see [Admin API](../usage/administration/admin_api/).
 
 The API is:
diff --git a/docs/admin_api/room_membership.md b/docs/admin_api/room_membership.md
index 310d6ae628..94bc95a8d5 100644
--- a/docs/admin_api/room_membership.md
+++ b/docs/admin_api/room_membership.md
@@ -6,7 +6,7 @@ local users. The server administrator must be in the room and have permission to
 invite users.
 
 To use it, you will need to authenticate by providing an `access_token`
-for a server admin: see [Admin API](../usage/administration/admin_api).
+for a server admin: see [Admin API](../usage/administration/admin_api/).
 
 ## Parameters
diff --git a/docs/admin_api/rooms.md b/docs/admin_api/rooms.md
index 8f727b363e..66b29e82dc 100644
--- a/docs/admin_api/rooms.md
+++ b/docs/admin_api/rooms.md
@@ -5,7 +5,7 @@ server. There are various parameters available that allow for filtering and
 sorting the returned list. This API supports pagination.
 
 To use it, you will need to authenticate by providing an `access_token`
-for a server admin: see [Admin API](../usage/administration/admin_api).
+for a server admin: see [Admin API](../usage/administration/admin_api/).
 
 **Parameters**
 
@@ -400,7 +400,7 @@ sent to a room in a given timeframe. There are various parameters available
 that allow for filtering and ordering the returned list. This API supports pagination.
 
 To use it, you will need to authenticate by providing an `access_token`
-for a server admin: see [Admin API](../usage/administration/admin_api).
+for a server admin: see [Admin API](../usage/administration/admin_api/).
 
 This endpoint mirrors the [Matrix Spec defined Messages API](https://spec.matrix.org/v1.1/client-server-api/#get_matrixclientv3roomsroomidmessages).
diff --git a/docs/admin_api/statistics.md b/docs/admin_api/statistics.md
index a26c76f9f3..03b3621e55 100644
--- a/docs/admin_api/statistics.md
+++ b/docs/admin_api/statistics.md
@@ -4,7 +4,7 @@ Returns information about all local media usage of users. Gives the
 possibility to filter them by time and user.
 
 To use it, you will need to authenticate by providing an `access_token`
-for a server admin: see [Admin API](../usage/administration/admin_api).
+for a server admin: see [Admin API](../usage/administration/admin_api/).
 
 The API is:
diff --git a/docs/admin_api/user_admin_api.md b/docs/admin_api/user_admin_api.md
index 880bef4194..86c29ab380 100644
--- a/docs/admin_api/user_admin_api.md
+++ b/docs/admin_api/user_admin_api.md
@@ -1,7 +1,7 @@
 # User Admin API
 
 To use it, you will need to authenticate by providing an `access_token`
-for a server admin: see [Admin API](../usage/administration/admin_api).
+for a server admin: see [Admin API](../usage/administration/admin_api/).
 
 ## Query User Account
diff --git a/docs/code_style.md b/docs/code_style.md
index d65fda62d1..3aa7d0d741 100644
--- a/docs/code_style.md
+++ b/docs/code_style.md
@@ -10,7 +10,7 @@ The necessary tools are:
 
 - [black](https://black.readthedocs.io/en/stable/), a source code formatter;
 - [isort](https://pycqa.github.io/isort/), which organises each file's imports;
-- [flake8](https://flake8.pycqa.org/en/latest/), which can spot common errors; and
+- [ruff](https://github.com/charliermarsh/ruff), which can spot common errors; and
 - [mypy](https://mypy.readthedocs.io/en/stable/), a type checker.
 
 Install them with:
@@ -28,7 +28,7 @@ scripts-dev/lint.sh
 It's worth noting that modern IDEs and text editors can run these tools
 automatically on save. It may be worth looking into whether this
 functionality is supported in your editor for a more convenient
-development workflow. It is not, however, recommended to run `flake8` or `mypy`
+development workflow. It is not, however, recommended to run `mypy`
 on save as they take a while and can be very resource intensive.
 
 ## General rules
diff --git a/docs/development/contributing_guide.md b/docs/development/contributing_guide.md
index d07790f184..4c10676714 100644
--- a/docs/development/contributing_guide.md
+++ b/docs/development/contributing_guide.md
@@ -106,8 +106,8 @@ regarding Synapse's Admin API, which is used mostly by sysadmins and external
 service developers.
 
 Synapse's code style is documented [here](../code_style.md). Please follow
-it, including the conventions for the [sample configuration
-file](../code_style.md#configuration-file-format).
+it, including the conventions for [configuration
+options and documentation](../code_style.md#configuration-code-and-documentation-format).
 
 We welcome improvements and additions to our documentation itself! When
 writing new pages, please
@@ -126,7 +126,7 @@ changes to the Rust code.
 
 
 # 8. Test, test, test!
-<a name="test-test-test"></a>
+<a name="test-test-test" id="test-test-test"></a>
 
 While you're developing and before submitting a patch, you'll want to
 test your code.
@@ -382,7 +382,7 @@ To prepare a Pull Request, please:
 ## Changelog
 
 All changes, even minor ones, need a corresponding changelog / newsfragment
-entry. These are managed by [Towncrier](https://github.com/hawkowl/towncrier).
+entry. These are managed by [Towncrier](https://github.com/twisted/towncrier).
 
 To create a changelog entry, make a new file in the `changelog.d` directory
 named in the format of `PRnumber.type`. The type can be one of the following:
@@ -424,8 +424,7 @@ chicken-and-egg problem.
 There are two options for solving this:
 
 1. Open the PR without a changelog file, see what number you got, and *then*
-   add the changelog file to your branch (see [Updating your pull
-   request](#updating-your-pull-request)), or:
+   add the changelog file to your branch, or:
 
 1. Look at the [list of all
    issues/PRs](https://github.com/matrix-org/synapse/issues?q=), add one to the
diff --git a/docs/modules/writing_a_module.md b/docs/modules/writing_a_module.md
index e6303b739e..30de69a533 100644
--- a/docs/modules/writing_a_module.md
+++ b/docs/modules/writing_a_module.md
@@ -59,8 +59,8 @@ namespace (such as anything under `/_matrix/client` for example). It is
 strongly recommended that modules register their web resources under the
 `/_synapse/client` namespace.
 
-The provided resource is a Python class that implements Twisted's [IResource](https://twistedmatrix.com/documents/current/api/twisted.web.resource.IResource.html)
-interface (such as [Resource](https://twistedmatrix.com/documents/current/api/twisted.web.resource.Resource.html)).
+The provided resource is a Python class that implements Twisted's [IResource](https://docs.twistedmatrix.com/en/stable/api/twisted.web.resource.IResource.html)
+interface (such as [Resource](https://docs.twistedmatrix.com/en/stable/api/twisted.web.resource.Resource.html)).
 
 Only one resource can be registered for a given path. If several modules attempt to
 register a resource for the same path, the module that appears first in Synapse's
@@ -82,4 +82,4 @@ the callback name as the argument name and the function as its value. A
 `register_[...]_callbacks` method exists for each category.
 
 Callbacks for each category can be found on their respective page of the
-[Synapse documentation website](https://matrix-org.github.io/synapse).
\ No newline at end of file
+[Synapse documentation website](https://matrix-org.github.io/synapse).
diff --git a/docs/openid.md b/docs/openid.md
index e4ad45f306..6ee8c83ec0 100644 --- a/docs/openid.md +++ b/docs/openid.md
@@ -88,98 +88,41 @@ oidc_providers: display_name_template: "{{ user.name }}" ``` -### Dex - -[Dex][dex-idp] is a simple, open-source OpenID Connect Provider. -Although it is designed to help building a full-blown provider with an -external database, it can be configured with static passwords in a config file. - -Follow the [Getting Started guide](https://dexidp.io/docs/getting-started/) -to install Dex. - -Edit `examples/config-dev.yaml` config file from the Dex repo to add a client: - -```yaml -staticClients: -- id: synapse - secret: secret - redirectURIs: - - '[synapse public baseurl]/_synapse/client/oidc/callback' - name: 'Synapse' -``` - -Run with `dex serve examples/config-dev.yaml`. - -Synapse config: - -```yaml -oidc_providers: - - idp_id: dex - idp_name: "My Dex server" - skip_verification: true # This is needed as Dex is served on an insecure endpoint - issuer: "http://127.0.0.1:5556/dex" - client_id: "synapse" - client_secret: "secret" - scopes: ["openid", "profile"] - user_mapping_provider: - config: - localpart_template: "{{ user.name }}" - display_name_template: "{{ user.name|capitalize }}" -``` -### Keycloak - -[Keycloak][keycloak-idp] is an opensource IdP maintained by Red Hat. - -Keycloak supports OIDC Back-Channel Logout, which sends logout notification to Synapse, so that Synapse users get logged out when they log out from Keycloak. -This can be optionally enabled by setting `backchannel_logout_enabled` to `true` in the Synapse configuration, and by setting the "Backchannel Logout URL" in Keycloak. - -Follow the [Getting Started Guide](https://www.keycloak.org/getting-started) to install Keycloak and set up a realm. - -1. Click `Clients` in the sidebar and click `Create` - -2. Fill in the fields as below: - -| Field | Value | -|-----------|-----------| -| Client ID | `synapse` | -| Client Protocol | `openid-connect` | +### Apple -3. Click `Save` -4. Fill in the fields as below: +Configuring "Sign in with Apple" (SiWA) requires an Apple Developer account. -| Field | Value | -|-----------|-----------| -| Client ID | `synapse` | -| Enabled | `On` | -| Client Protocol | `openid-connect` | -| Access Type | `confidential` | -| Valid Redirect URIs | `[synapse public baseurl]/_synapse/client/oidc/callback` | -| Backchannel Logout URL (optional) | `[synapse public baseurl]/_synapse/client/oidc/backchannel_logout` | -| Backchannel Logout Session Required (optional) | `On` | +You will need to create a new "Services ID" for SiWA, and create and download a +private key with "SiWA" enabled. -5. Click `Save` -6. On the Credentials tab, update the fields: +As well as the private key file, you will need: + * Client ID: the "identifier" you gave the "Services ID" + * Team ID: a 10-character ID associated with your developer account. + * Key ID: the 10-character identifier for the key. -| Field | Value | -|-------|-------| -| Client Authenticator | `Client ID and Secret` | +[Apple's developer documentation](https://help.apple.com/developer-account/?lang=en#/dev77c875b7e) +has more information on setting up SiWA. -7. Click `Regenerate Secret` -8. 
Copy Secret +The synapse config will look like this: ```yaml -oidc_providers: - - idp_id: keycloak - idp_name: "My KeyCloak server" - issuer: "https://127.0.0.1:8443/realms/{realm_name}" - client_id: "synapse" - client_secret: "copy secret generated from above" - scopes: ["openid", "profile"] + - idp_id: apple + idp_name: Apple + issuer: "https://appleid.apple.com" + client_id: "your-client-id" # Set to the "identifier" for your "ServicesID" + client_auth_method: "client_secret_post" + client_secret_jwt_key: + key_file: "/path/to/AuthKey_KEYIDCODE.p8" # point to your key file + jwt_header: + alg: ES256 + kid: "KEYIDCODE" # Set to the 10-char Key ID + jwt_payload: + iss: TEAMIDCODE # Set to the 10-char Team ID + scopes: ["name", "email", "openid"] + authorization_endpoint: https://appleid.apple.com/auth/authorize?response_mode=form_post user_mapping_provider: config: - localpart_template: "{{ user.preferred_username }}" - display_name_template: "{{ user.name }}" - backchannel_logout_enabled: true # Optional + email_template: "{{ user.email }}" ``` ### Auth0 @@ -262,149 +205,91 @@ oidc_providers: display_name_template: "{{ user.preferred_username|capitalize }}" # TO BE FILLED: If your users have names in Authentik and you want those in Synapse, this should be replaced with user.name|capitalize. ``` -### LemonLDAP +### Dex -[LemonLDAP::NG][lemonldap] is an open-source IdP solution. +[Dex][dex-idp] is a simple, open-source OpenID Connect Provider. +Although it is designed to help building a full-blown provider with an +external database, it can be configured with static passwords in a config file. -1. Create an OpenID Connect Relying Parties in LemonLDAP::NG -2. The parameters are: -- Client ID under the basic menu of the new Relying Parties (`Options > Basic > - Client ID`) -- Client secret (`Options > Basic > Client secret`) -- JWT Algorithm: RS256 within the security menu of the new Relying Parties - (`Options > Security > ID Token signature algorithm` and `Options > Security > - Access Token signature algorithm`) -- Scopes: OpenID, Email and Profile -- Allowed redirection addresses for login (`Options > Basic > Allowed - redirection addresses for login` ) : - `[synapse public baseurl]/_synapse/client/oidc/callback` +Follow the [Getting Started guide](https://dexidp.io/docs/getting-started/) +to install Dex. + +Edit `examples/config-dev.yaml` config file from the Dex repo to add a client: -Synapse config: ```yaml -oidc_providers: - - idp_id: lemonldap - idp_name: lemonldap - discover: true - issuer: "https://auth.example.org/" # TO BE FILLED: replace with your domain - client_id: "your client id" # TO BE FILLED - client_secret: "your client secret" # TO BE FILLED - scopes: - - "openid" - - "profile" - - "email" - user_mapping_provider: - config: - localpart_template: "{{ user.preferred_username }}}" - # TO BE FILLED: If your users have names in LemonLDAP::NG and you want those in Synapse, this should be replaced with user.name|capitalize or any valid filter. - display_name_template: "{{ user.preferred_username|capitalize }}" +staticClients: +- id: synapse + secret: secret + redirectURIs: + - '[synapse public baseurl]/_synapse/client/oidc/callback' + name: 'Synapse' ``` -### GitHub - -[GitHub][github-idp] is a bit special as it is not an OpenID Connect compliant provider, but -just a regular OAuth2 provider. - -The [`/user` API endpoint](https://developer.github.com/v3/users/#get-the-authenticated-user) -can be used to retrieve information on the authenticated user. 
As the Synapse -login mechanism needs an attribute to uniquely identify users, and that endpoint -does not return a `sub` property, an alternative `subject_claim` has to be set. - -1. Create a new OAuth application: [https://github.com/settings/applications/new](https://github.com/settings/applications/new). -2. Set the callback URL to `[synapse public baseurl]/_synapse/client/oidc/callback`. +Run with `dex serve examples/config-dev.yaml`. Synapse config: ```yaml oidc_providers: - - idp_id: github - idp_name: Github - idp_brand: "github" # optional: styling hint for clients - discover: false - issuer: "https://github.com/" - client_id: "your-client-id" # TO BE FILLED - client_secret: "your-client-secret" # TO BE FILLED - authorization_endpoint: "https://github.com/login/oauth/authorize" - token_endpoint: "https://github.com/login/oauth/access_token" - userinfo_endpoint: "https://api.github.com/user" - scopes: ["read:user"] + - idp_id: dex + idp_name: "My Dex server" + skip_verification: true # This is needed as Dex is served on an insecure endpoint + issuer: "http://127.0.0.1:5556/dex" + client_id: "synapse" + client_secret: "secret" + scopes: ["openid", "profile"] user_mapping_provider: config: - subject_claim: "id" - localpart_template: "{{ user.login }}" - display_name_template: "{{ user.name }}" + localpart_template: "{{ user.name }}" + display_name_template: "{{ user.name|capitalize }}" ``` -### Google - -[Google][google-idp] is an OpenID certified authentication and authorisation provider. - -1. Set up a project in the Google API Console (see - [documentation](https://developers.google.com/identity/protocols/oauth2/openid-connect#appsetup)). -3. Add an "OAuth Client ID" for a Web Application under "Credentials". -4. Copy the Client ID and Client Secret, and add the following to your synapse config: - ```yaml - oidc_providers: - - idp_id: google - idp_name: Google - idp_brand: "google" # optional: styling hint for clients - issuer: "https://accounts.google.com/" - client_id: "your-client-id" # TO BE FILLED - client_secret: "your-client-secret" # TO BE FILLED - scopes: ["openid", "profile", "email"] # email is optional, read below - user_mapping_provider: - config: - localpart_template: "{{ user.given_name|lower }}" - display_name_template: "{{ user.name }}" - email_template: "{{ user.email }}" # needs "email" in scopes above - ``` -4. Back in the Google console, add this Authorized redirect URI: `[synapse - public baseurl]/_synapse/client/oidc/callback`. - -### Twitch - -1. Setup a developer account on [Twitch](https://dev.twitch.tv/) -2. Obtain the OAuth 2.0 credentials by [creating an app](https://dev.twitch.tv/console/apps/) -3. Add this OAuth Redirect URL: `[synapse public baseurl]/_synapse/client/oidc/callback` +### Django OAuth Toolkit -Synapse config: +[django-oauth-toolkit](https://github.com/jazzband/django-oauth-toolkit) is a +Django application providing out of the box all the endpoints, data and logic +needed to add OAuth2 capabilities to your Django projects. It supports +[OpenID Connect too](https://django-oauth-toolkit.readthedocs.io/en/latest/oidc.html). 
-```yaml -oidc_providers: - - idp_id: twitch - idp_name: Twitch - issuer: "https://id.twitch.tv/oauth2/" - client_id: "your-client-id" # TO BE FILLED - client_secret: "your-client-secret" # TO BE FILLED - client_auth_method: "client_secret_post" - user_mapping_provider: - config: - localpart_template: "{{ user.preferred_username }}" - display_name_template: "{{ user.name }}" -``` +Configuration on Django's side: -### GitLab +1. Add an application: `https://example.com/admin/oauth2_provider/application/add/` and choose parameters like this: +* `Redirect uris`: `https://synapse.example.com/_synapse/client/oidc/callback` +* `Client type`: `Confidential` +* `Authorization grant type`: `Authorization code` +* `Algorithm`: `HMAC with SHA-2 256` +2. You can [customize the claims](https://django-oauth-toolkit.readthedocs.io/en/latest/oidc.html#customizing-the-oidc-responses) Django gives to synapse (optional): + <details> + <summary>Code sample</summary> -1. Create a [new application](https://gitlab.com/profile/applications). -2. Add the `read_user` and `openid` scopes. -3. Add this Callback URL: `[synapse public baseurl]/_synapse/client/oidc/callback` + ```python + class CustomOAuth2Validator(OAuth2Validator): -Synapse config: + def get_additional_claims(self, request): + return { + "sub": request.user.email, + "email": request.user.email, + "first_name": request.user.first_name, + "last_name": request.user.last_name, + } + ``` + </details> +Your synapse config is then: ```yaml oidc_providers: - - idp_id: gitlab - idp_name: Gitlab - idp_brand: "gitlab" # optional: styling hint for clients - issuer: "https://gitlab.com/" - client_id: "your-client-id" # TO BE FILLED - client_secret: "your-client-secret" # TO BE FILLED - client_auth_method: "client_secret_post" - scopes: ["openid", "read_user"] - user_profile_method: "userinfo_endpoint" + - idp_id: django_example + idp_name: "Django Example" + issuer: "https://example.com/o/" + client_id: "your-client-id" # CHANGE ME + client_secret: "your-client-secret" # CHANGE ME + scopes: ["openid"] + user_profile_method: "userinfo_endpoint" # needed because oauth-toolkit does not include user information in the authorization response user_mapping_provider: config: - localpart_template: '{{ user.nickname }}' - display_name_template: '{{ user.name }}' + localpart_template: "{{ user.email.split('@')[0] }}" + display_name_template: "{{ user.first_name }} {{ user.last_name }}" + email_template: "{{ user.email }}" ``` ### Facebook @@ -451,6 +336,66 @@ but it has a `response_types_supported` which excludes "code" (which we rely on, is even mentioned in their [documentation](https://developers.facebook.com/docs/facebook-login/manually-build-a-login-flow#login)), so we have to disable discovery and configure the URIs manually. +### GitHub + +[GitHub][github-idp] is a bit special as it is not an OpenID Connect compliant provider, but +just a regular OAuth2 provider. + +The [`/user` API endpoint](https://developer.github.com/v3/users/#get-the-authenticated-user) +can be used to retrieve information on the authenticated user. As the Synapse +login mechanism needs an attribute to uniquely identify users, and that endpoint +does not return a `sub` property, an alternative `subject_claim` has to be set. + +1. Create a new OAuth application: [https://github.com/settings/applications/new](https://github.com/settings/applications/new). +2. Set the callback URL to `[synapse public baseurl]/_synapse/client/oidc/callback`. 
+ +Synapse config: + +```yaml +oidc_providers: + - idp_id: github + idp_name: Github + idp_brand: "github" # optional: styling hint for clients + discover: false + issuer: "https://github.com/" + client_id: "your-client-id" # TO BE FILLED + client_secret: "your-client-secret" # TO BE FILLED + authorization_endpoint: "https://github.com/login/oauth/authorize" + token_endpoint: "https://github.com/login/oauth/access_token" + userinfo_endpoint: "https://api.github.com/user" + scopes: ["read:user"] + user_mapping_provider: + config: + subject_claim: "id" + localpart_template: "{{ user.login }}" + display_name_template: "{{ user.name }}" +``` + +### GitLab + +1. Create a [new application](https://gitlab.com/profile/applications). +2. Add the `read_user` and `openid` scopes. +3. Add this Callback URL: `[synapse public baseurl]/_synapse/client/oidc/callback` + +Synapse config: + +```yaml +oidc_providers: + - idp_id: gitlab + idp_name: Gitlab + idp_brand: "gitlab" # optional: styling hint for clients + issuer: "https://gitlab.com/" + client_id: "your-client-id" # TO BE FILLED + client_secret: "your-client-secret" # TO BE FILLED + client_auth_method: "client_secret_post" + scopes: ["openid", "read_user"] + user_profile_method: "userinfo_endpoint" + user_mapping_provider: + config: + localpart_template: '{{ user.nickname }}' + display_name_template: '{{ user.name }}' +``` + ### Gitea Gitea is, like Github, not an OpenID provider, but just an OAuth2 provider. @@ -485,110 +430,123 @@ oidc_providers: display_name_template: "{{ user.full_name }}" ``` -### XWiki +### Google -Install [OpenID Connect Provider](https://extensions.xwiki.org/xwiki/bin/view/Extension/OpenID%20Connect/OpenID%20Connect%20Provider/) extension in your [XWiki](https://www.xwiki.org) instance. +[Google][google-idp] is an OpenID certified authentication and authorisation provider. -Synapse config: +1. Set up a project in the Google API Console (see + [documentation](https://developers.google.com/identity/protocols/oauth2/openid-connect#appsetup)). +3. Add an "OAuth Client ID" for a Web Application under "Credentials". +4. Copy the Client ID and Client Secret, and add the following to your synapse config: + ```yaml + oidc_providers: + - idp_id: google + idp_name: Google + idp_brand: "google" # optional: styling hint for clients + issuer: "https://accounts.google.com/" + client_id: "your-client-id" # TO BE FILLED + client_secret: "your-client-secret" # TO BE FILLED + scopes: ["openid", "profile", "email"] # email is optional, read below + user_mapping_provider: + config: + localpart_template: "{{ user.given_name|lower }}" + display_name_template: "{{ user.name }}" + email_template: "{{ user.email }}" # needs "email" in scopes above + ``` +4. Back in the Google console, add this Authorized redirect URI: `[synapse + public baseurl]/_synapse/client/oidc/callback`. -```yaml -oidc_providers: - - idp_id: xwiki - idp_name: "XWiki" - issuer: "https://myxwikihost/xwiki/oidc/" - client_id: "your-client-id" # TO BE FILLED - client_auth_method: none - scopes: ["openid", "profile"] - user_profile_method: "userinfo_endpoint" - user_mapping_provider: - config: - localpart_template: "{{ user.preferred_username }}" - display_name_template: "{{ user.name }}" -``` +### Keycloak -### Apple +[Keycloak][keycloak-idp] is an opensource IdP maintained by Red Hat. -Configuring "Sign in with Apple" (SiWA) requires an Apple Developer account. 
+Keycloak supports OIDC Back-Channel Logout, which sends logout notification to Synapse, so that Synapse users get logged out when they log out from Keycloak. +This can be optionally enabled by setting `backchannel_logout_enabled` to `true` in the Synapse configuration, and by setting the "Backchannel Logout URL" in Keycloak. -You will need to create a new "Services ID" for SiWA, and create and download a -private key with "SiWA" enabled. +Follow the [Getting Started Guide](https://www.keycloak.org/guides) to install Keycloak and set up a realm. -As well as the private key file, you will need: - * Client ID: the "identifier" you gave the "Services ID" - * Team ID: a 10-character ID associated with your developer account. - * Key ID: the 10-character identifier for the key. +1. Click `Clients` in the sidebar and click `Create` -[Apple's developer documentation](https://help.apple.com/developer-account/?lang=en#/dev77c875b7e) -has more information on setting up SiWA. +2. Fill in the fields as below: -The synapse config will look like this: +| Field | Value | +|-----------|-----------| +| Client ID | `synapse` | +| Client Protocol | `openid-connect` | + +3. Click `Save` +4. Fill in the fields as below: + +| Field | Value | +|-----------|-----------| +| Client ID | `synapse` | +| Enabled | `On` | +| Client Protocol | `openid-connect` | +| Access Type | `confidential` | +| Valid Redirect URIs | `[synapse public baseurl]/_synapse/client/oidc/callback` | +| Backchannel Logout URL (optional) | `[synapse public baseurl]/_synapse/client/oidc/backchannel_logout` | +| Backchannel Logout Session Required (optional) | `On` | + +5. Click `Save` +6. On the Credentials tab, update the fields: + +| Field | Value | +|-------|-------| +| Client Authenticator | `Client ID and Secret` | + +7. Click `Regenerate Secret` +8. Copy Secret ```yaml - - idp_id: apple - idp_name: Apple - issuer: "https://appleid.apple.com" - client_id: "your-client-id" # Set to the "identifier" for your "ServicesID" - client_auth_method: "client_secret_post" - client_secret_jwt_key: - key_file: "/path/to/AuthKey_KEYIDCODE.p8" # point to your key file - jwt_header: - alg: ES256 - kid: "KEYIDCODE" # Set to the 10-char Key ID - jwt_payload: - iss: TEAMIDCODE # Set to the 10-char Team ID - scopes: ["name", "email", "openid"] - authorization_endpoint: https://appleid.apple.com/auth/authorize?response_mode=form_post +oidc_providers: + - idp_id: keycloak + idp_name: "My KeyCloak server" + issuer: "https://127.0.0.1:8443/realms/{realm_name}" + client_id: "synapse" + client_secret: "copy secret generated from above" + scopes: ["openid", "profile"] user_mapping_provider: config: - email_template: "{{ user.email }}" + localpart_template: "{{ user.preferred_username }}" + display_name_template: "{{ user.name }}" + backchannel_logout_enabled: true # Optional ``` -### Django OAuth Toolkit - -[django-oauth-toolkit](https://github.com/jazzband/django-oauth-toolkit) is a -Django application providing out of the box all the endpoints, data and logic -needed to add OAuth2 capabilities to your Django projects. It supports -[OpenID Connect too](https://django-oauth-toolkit.readthedocs.io/en/latest/oidc.html). - -Configuration on Django's side: - -1. 
Add an application: `https://example.com/admin/oauth2_provider/application/add/` and choose parameters like this:
-* `Redirect uris`: `https://synapse.example.com/_synapse/client/oidc/callback`
-* `Client type`: `Confidential`
-* `Authorization grant type`: `Authorization code`
-* `Algorithm`: `HMAC with SHA-2 256`
-2. You can [customize the claims](https://django-oauth-toolkit.readthedocs.io/en/latest/oidc.html#customizing-the-oidc-responses) Django gives to synapse (optional):
-   <details>
-    <summary>Code sample</summary>
+### LemonLDAP
 
-    ```python
-    class CustomOAuth2Validator(OAuth2Validator):
+[LemonLDAP::NG][lemonldap] is an open-source IdP solution.
 
-        def get_additional_claims(self, request):
-            return {
-                "sub": request.user.email,
-                "email": request.user.email,
-                "first_name": request.user.first_name,
-                "last_name": request.user.last_name,
-            }
-    ```
-   </details>
-Your synapse config is then:
+1. Create an OpenID Connect Relying Party in LemonLDAP::NG
+2. Set the parameters as follows:
+- Client ID under the basic menu of the new Relying Party (`Options > Basic >
+  Client ID`)
+- Client secret (`Options > Basic > Client secret`)
+- JWT Algorithm: RS256 within the security menu of the new Relying Party
+  (`Options > Security > ID Token signature algorithm` and `Options > Security >
+  Access Token signature algorithm`)
+- Scopes: OpenID, Email and Profile
+- Allowed redirection addresses for login (`Options > Basic > Allowed
+  redirection addresses for login`):
+  `[synapse public baseurl]/_synapse/client/oidc/callback`
+
+Synapse config:
 
 ```yaml
 oidc_providers:
-  - idp_id: django_example
-    idp_name: "Django Example"
-    issuer: "https://example.com/o/"
-    client_id: "your-client-id" # CHANGE ME
-    client_secret: "your-client-secret" # CHANGE ME
-    scopes: ["openid"]
-    user_profile_method: "userinfo_endpoint" # needed because oauth-toolkit does not include user information in the authorization response
+  - idp_id: lemonldap
+    idp_name: lemonldap
+    discover: true
+    issuer: "https://auth.example.org/" # TO BE FILLED: replace with your domain
+    client_id: "your client id" # TO BE FILLED
+    client_secret: "your client secret" # TO BE FILLED
+    scopes:
+      - "openid"
+      - "profile"
+      - "email"
     user_mapping_provider:
      config:
-        localpart_template: "{{ user.email.split('@')[0] }}"
-        display_name_template: "{{ user.first_name }} {{ user.last_name }}"
-        email_template: "{{ user.email }}"
+        localpart_template: "{{ user.preferred_username }}"
+        # TO BE FILLED: if your users have names in LemonLDAP::NG and you want those in Synapse, this should be replaced with user.name|capitalize or any valid filter.
+        display_name_template: "{{ user.preferred_username|capitalize }}"
 ```
 
 ### Mastodon
@@ -631,3 +589,81 @@ oidc_providers:
 ```
 
 Note that the fields `client_id` and `client_secret` are taken from the CURL response above.
+
+### Twitch
+
+1. Set up a developer account on [Twitch](https://dev.twitch.tv/)
+2. Obtain the OAuth 2.0 credentials by [creating an app](https://dev.twitch.tv/console/apps/)
+3. 
Add this OAuth Redirect URL: `[synapse public baseurl]/_synapse/client/oidc/callback`
+
+Synapse config:
+
+```yaml
+oidc_providers:
+  - idp_id: twitch
+    idp_name: Twitch
+    issuer: "https://id.twitch.tv/oauth2/"
+    client_id: "your-client-id" # TO BE FILLED
+    client_secret: "your-client-secret" # TO BE FILLED
+    client_auth_method: "client_secret_post"
+    user_mapping_provider:
+      config:
+        localpart_template: "{{ user.preferred_username }}"
+        display_name_template: "{{ user.name }}"
+```
+
+### Twitter
+
+*Using Twitter as an identity provider requires using Synapse 1.75.0 or later.*
+
+1. Set up a developer account on [Twitter](https://developer.twitter.com/en/portal/dashboard)
+2. Create a project & app.
+3. Enable user authentication and, under "Type of App", choose "Web App, Automated App or Bot".
+4. Under "App info" set the callback URL to `[synapse public baseurl]/_synapse/client/oidc/callback`.
+5. Obtain the OAuth 2.0 credentials under the "Keys and tokens" tab, and copy the OAuth 2.0 "Client ID" and "Client Secret".
+
+Synapse config:
+
+```yaml
+oidc_providers:
+  - idp_id: twitter
+    idp_name: Twitter
+    idp_brand: "twitter"  # optional: styling hint for clients
+    discover: false  # Twitter is not OpenID compliant.
+    issuer: "https://twitter.com/"
+    client_id: "your-client-id" # TO BE FILLED
+    client_secret: "your-client-secret" # TO BE FILLED
+    pkce_method: "always"
+    # offline.access provides refresh tokens; tweet.read and users.read are needed for the userinfo request.
+    scopes: ["offline.access", "tweet.read", "users.read"]
+    authorization_endpoint: https://twitter.com/i/oauth2/authorize
+    token_endpoint: https://api.twitter.com/2/oauth2/token
+    userinfo_endpoint: https://api.twitter.com/2/users/me?user.fields=profile_image_url
+    user_mapping_provider:
+      config:
+        subject_template: "{{ user.data.id }}"
+        localpart_template: "{{ user.data.username }}"
+        display_name_template: "{{ user.data.name }}"
+        picture_template: "{{ user.data.profile_image_url }}"
+```
+
+### XWiki
+
+Install the [OpenID Connect Provider](https://extensions.xwiki.org/xwiki/bin/view/Extension/OpenID%20Connect/OpenID%20Connect%20Provider/) extension in your [XWiki](https://www.xwiki.org) instance.
+
+Synapse config:
+
+```yaml
+oidc_providers:
+  - idp_id: xwiki
+    idp_name: "XWiki"
+    issuer: "https://myxwikihost/xwiki/oidc/"
+    client_id: "your-client-id" # TO BE FILLED
+    client_auth_method: none
+    scopes: ["openid", "profile"]
+    user_profile_method: "userinfo_endpoint"
+    user_mapping_provider:
+      config:
+        localpart_template: "{{ user.preferred_username }}"
+        display_name_template: "{{ user.name }}"
+```
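+
+Note that all of the providers above are entries in the same `oidc_providers`
+list, so several identity providers can be offered side by side as long as
+each entry has a distinct `idp_id`, and clients can then offer users a choice
+between them. As an illustrative sketch (not part of this change), two of the
+configurations shown above might be combined like so:
+
+```yaml
+oidc_providers:
+  - idp_id: gitlab
+    idp_name: Gitlab
+    issuer: "https://gitlab.com/"
+    client_id: "your-client-id" # TO BE FILLED
+    client_secret: "your-client-secret" # TO BE FILLED
+    client_auth_method: "client_secret_post"
+    scopes: ["openid", "read_user"]
+  - idp_id: xwiki
+    idp_name: "XWiki"
+    issuer: "https://myxwikihost/xwiki/oidc/"
+    client_id: "your-client-id" # TO BE FILLED
+    client_auth_method: none
+    scopes: ["openid", "profile"]
+    user_profile_method: "userinfo_endpoint"
+```
diff --git a/docs/postgres.md b/docs/postgres.md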
index 46b4603fe5..fba4430f33 100644 --- a/docs/postgres.md +++ b/docs/postgres.md
@@ -16,7 +16,7 @@ connect to a postgres database. - For other pre-built packages, please consult the documentation from the relevant package. - If you installed synapse [in a - virtualenv](setup/installation.md#installing-from-source), you can install + virtualenv](setup/installation.md#installing-as-a-python-module-from-pypi), you can install the library with: ~/synapse/env/bin/pip install "matrix-synapse[postgres]" diff --git a/docs/reverse_proxy.md b/docs/reverse_proxy.md
index 48dbc1c58e..06337e7c00 100644 --- a/docs/reverse_proxy.md +++ b/docs/reverse_proxy.md
@@ -46,7 +46,7 @@ when using a containerized Synapse, as that will prevent it from responding to proxied traffic.) Optionally, you can also set -[`request_id_header`](../usage/configuration/config_documentation.md#listeners) +[`request_id_header`](./usage/configuration/config_documentation.md#listeners) so that the server extracts and re-uses the same request ID format that the reverse proxy is using. diff --git a/docs/setup/installation.md b/docs/setup/installation.md
index a762ad55df..d123e339ed 100644 --- a/docs/setup/installation.md +++ b/docs/setup/installation.md
@@ -136,7 +136,7 @@ Unofficial package are built for SLES 15 in the openSUSE:Backports:SLE-15 reposi #### ArchLinux The quickest way to get up and running with ArchLinux is probably with the community package -<https://www.archlinux.org/packages/community/any/matrix-synapse/>, which should pull in most of +<https://archlinux.org/packages/community/x86_64/matrix-synapse/>, which should pull in most of the necessary dependencies. pip may be outdated (6.0.7-1 and needs to be upgraded to 6.0.8-1 ): diff --git a/docs/sso_mapping_providers.md b/docs/sso_mapping_providers.md
index 9f5e5fbbe1..a5d4659619 100644 --- a/docs/sso_mapping_providers.md +++ b/docs/sso_mapping_providers.md
@@ -120,7 +120,7 @@ specified in the config. It is located at ## SAML Mapping Providers The SAML mapping provider can be customized by editing the -[`saml2_config.user_mapping_provider.module`](docs/usage/configuration/config_documentation.md#saml2_config) +[`saml2_config.user_mapping_provider.module`](usage/configuration/config_documentation.md#saml2_config) config option. `saml2_config.user_mapping_provider.config` allows you to provide custom diff --git a/docs/upgrade.md b/docs/upgrade.md
index dcae12ec94..c4bc5889a9 100644 --- a/docs/upgrade.md +++ b/docs/upgrade.md
@@ -99,7 +99,7 @@ the ICU native dependency and its development headers so that PyICU can build since no prebuilt wheels are available. You can follow [the PyICU documentation](https://pypi.org/project/PyICU/) to do so, -and then do `pip install matrix-synapse[icu]` for a PyPI install. +and then do `pip install matrix-synapse[user-search]` for a PyPI install. Docker images and Debian packages need nothing specific as they already include or specify ICU as an explicit dependency. @@ -889,8 +889,8 @@ Any scripts still using the above APIs should be converted to use the ## User-interactive authentication fallback templates can now display errors This may affect you if you make use of custom HTML templates for the -[reCAPTCHA](../synapse/res/templates/recaptcha.html) or -[terms](../synapse/res/templates/terms.html) fallback pages. +[reCAPTCHA (`synapse/res/templates/recaptcha.html`)](https://github.com/matrix-org/synapse/tree/develop/synapse/res/templates/recaptcha.html) or +[terms (`synapse/res/templates/terms.html`)](https://github.com/matrix-org/synapse/tree/develop/synapse/res/templates/terms.html) fallback pages. The template is now provided an `error` variable if the authentication process failed. See the default templates linked above for an example. @@ -1488,7 +1488,7 @@ New templates (`sso_auth_confirm.html`, `sso_auth_success.html`, and is configured to use SSO and a custom `sso_redirect_confirm_template_dir` configuration then these templates will need to be copied from -[synapse/res/templates](synapse/res/templates) into that directory. +[`synapse/res/templates`](https://github.com/matrix-org/synapse/tree/develop/synapse/res/templates) into that directory. ## Synapse SSO Plugins Method Deprecation diff --git a/docs/usage/administration/admin_api/README.md b/docs/usage/administration/admin_api/README.md
index c00de2dd44..7c85bf751b 100644 --- a/docs/usage/administration/admin_api/README.md +++ b/docs/usage/administration/admin_api/README.md
@@ -7,7 +7,7 @@ server admin. (Note that a server admin is distinct from a room admin.) An existing user can be marked as a server admin by updating the database directly. -Check your [database settings](config_documentation.md#database) in the configuration file, connect to the correct database using either `psql [database name]` (if using PostgreSQL) or `sqlite3 path/to/your/database.db` (if using SQLite) and elevate the user `@foo:bar.com` to administrator. +Check your [database settings](../../configuration/config_documentation.md#database) in the configuration file, connect to the correct database using either `psql [database name]` (if using PostgreSQL) or `sqlite3 path/to/your/database.db` (if using SQLite) and elevate the user `@foo:bar.com` to administrator. ```sql UPDATE users SET admin = 1 WHERE name = '@foo:bar.com'; ``` @@ -32,10 +32,10 @@ curl --header "Authorization: Bearer <access_token>" <the_rest_of_your_API_reque ``` For example, suppose we want to -[query the account](user_admin_api.md#query-user-account) of the user +[query the account](../../../admin_api/user_admin_api.md#query-user-account) of the user `@foo:bar.com`. We need an admin access token (e.g. `syt_AjfVef2_L33JNpafeif_0feKJfeaf0CQpoZk`), and we need to know which port -Synapse's [`client` listener](config_documentation.md#listeners) is listening +Synapse's [`client` listener](../../configuration/config_documentation.md#listeners) is listening on (e.g. `8008`). Then we can use the following command to request the account information from the Admin API. diff --git a/docs/usage/administration/admin_api/federation.md b/docs/usage/administration/admin_api/federation.md
index 60cbc5265e..51f3b52da8 100644 --- a/docs/usage/administration/admin_api/federation.md +++ b/docs/usage/administration/admin_api/federation.md
@@ -81,7 +81,7 @@ The following fields are returned in the JSON response body: - `failure_ts` - nullable integer - The first time Synapse tried and failed to reach the remote server, in ms. This is `null` if communication with the remote server has never failed. - `last_successful_stream_ordering` - nullable integer - The stream ordering of the most - recent successfully-sent [PDU](understanding_synapse_through_grafana_graphs.md#federation) + recent successfully-sent [PDU](../understanding_synapse_through_grafana_graphs.md#federation) to this destination, or `null` if this information has not been tracked yet. - `next_token`: string representing a positive integer - Indication for pagination. See above. - `total` - integer - Total number of destinations. @@ -174,7 +174,7 @@ The following fields are returned in the JSON response body: Room objects contain the following fields: - `room_id` - string - The ID of the room. - `stream_ordering` - integer - The stream ordering of the most recent - successfully-sent [PDU](understanding_synapse_through_grafana_graphs.md#federation) + successfully-sent [PDU](../understanding_synapse_through_grafana_graphs.md#federation) to this destination in this room. - `next_token`: string representing a positive integer - Indication for pagination. See above. - `total` - integer - Total number of destinations. diff --git a/docs/usage/administration/admin_api/registration_tokens.md b/docs/usage/administration/admin_api/registration_tokens.md
index 90cbc21125..c5130859d4 100644 --- a/docs/usage/administration/admin_api/registration_tokens.md +++ b/docs/usage/administration/admin_api/registration_tokens.md
@@ -6,7 +6,7 @@ registration requests, as proposed in and stabilised in version 1.2 of the Matrix specification. To use it, you will need to enable the `registration_requires_token` config option, and authenticate by providing an `access_token` for a server admin: -see [Admin API](../admin_api). +see [Admin API](../admin_api/). ## Registration token objects diff --git a/docs/usage/administration/admin_faq.md b/docs/usage/administration/admin_faq.md
index 0bfb732464..a6dc6197c9 100644 --- a/docs/usage/administration/admin_faq.md +++ b/docs/usage/administration/admin_faq.md
@@ -2,7 +2,7 @@ How do I become a server admin? --- -If your server already has an admin account you should use the [User Admin API](../../admin_api/user_admin_api.md#Change-whether-a-user-is-a-server-administrator-or-not) to promote other accounts to become admins. +If your server already has an admin account you should use the [User Admin API](../../admin_api/user_admin_api.md#change-whether-a-user-is-a-server-administrator-or-not) to promote other accounts to become admins. If you don't have any admin accounts yet you won't be able to use the admin API, so you'll have to edit the database manually. Manually editing the database is generally not recommended so once you have an admin account: use the admin APIs to make further changes. @@ -115,7 +115,7 @@ something like the following in their logs: 2019-09-11 19:32:04,271 - synapse.federation.transport.server - 288 - WARNING - GET-11752 - authenticate_request failed: 401: Invalid signature for server <server> with key ed25519:a_EqML: Unable to verify signature for <server> -This is normally caused by a misconfiguration in your reverse-proxy. See [the reverse proxy docs](docs/reverse_proxy.md) and double-check that your settings are correct. +This is normally caused by a misconfiguration in your reverse-proxy. See [the reverse proxy docs](../../reverse_proxy.md) and double-check that your settings are correct. Help!! Synapse is slow and eats all my RAM/CPU! diff --git a/docs/usage/administration/monitoring/reporting_homeserver_usage_statistics.md b/docs/usage/administration/monitoring/reporting_homeserver_usage_statistics.md
index 4e53f9883a..3a7ed7c806 100644 --- a/docs/usage/administration/monitoring/reporting_homeserver_usage_statistics.md +++ b/docs/usage/administration/monitoring/reporting_homeserver_usage_statistics.md
@@ -78,4 +78,4 @@ If you would like to set up your own statistics collection server and send metri consider using one of the following known implementations: * [Matrix.org's Panopticon](https://github.com/matrix-org/panopticon) -* [Famedly's Barad-dûr](https://gitlab.com/famedly/company/devops/services/barad-dur) +* [Famedly's Barad-dûr](https://gitlab.com/famedly/infra/services/barad-dur) diff --git a/docs/usage/administration/request_log.md b/docs/usage/administration/request_log.md
index 82f5ac7b96..7dd9969d86 100644 --- a/docs/usage/administration/request_log.md +++ b/docs/usage/administration/request_log.md
@@ -1,6 +1,6 @@ # Request log format -HTTP request logs are written by synapse (see [`site.py`](../synapse/http/site.py) for details). +HTTP request logs are written by synapse (see [`synapse/http/site.py`](https://github.com/matrix-org/synapse/tree/develop/synapse/http/site.py) for details). See the following for how to decode the dense data available from the default logging configuration. diff --git a/docs/usage/configuration/config_documentation.md b/docs/usage/configuration/config_documentation.md
index 6b8768f45d..93d6c7fb02 100644 --- a/docs/usage/configuration/config_documentation.md +++ b/docs/usage/configuration/config_documentation.md
@@ -569,6 +569,115 @@ Example configuration: ```yaml delete_stale_devices_after: 1y ``` +--- +### `email` + +Configuration for sending emails from Synapse. + +Server admins can configure custom templates for email content. See +[here](../../templates.md) for more information. + +This setting has the following sub-options: +* `smtp_host`: The hostname of the outgoing SMTP server to use. Defaults to 'localhost'. +* `smtp_port`: The port on the mail server for outgoing SMTP. Defaults to 465 if `force_tls` is true, else 25. + + _Changed in Synapse 1.64.0:_ the default port is now aware of `force_tls`. +* `smtp_user` and `smtp_pass`: Username/password for authentication to the SMTP server. By default, no + authentication is attempted. +* `force_tls`: By default, Synapse connects over plain text and then optionally upgrades + to TLS via STARTTLS. If this option is set to true, TLS is used from the start (Implicit TLS), + and the option `require_transport_security` is ignored. + It is recommended to enable this if supported by your mail server. + + _New in Synapse 1.64.0._ +* `require_transport_security`: Set to true to require TLS transport security for SMTP. + By default, Synapse will connect over plain text, and will then switch to + TLS via STARTTLS *if the SMTP server supports it*. If this option is set, + Synapse will refuse to connect unless the server supports STARTTLS. +* `enable_tls`: By default, if the server supports TLS, it will be used, and the server + must present a certificate that is valid for 'smtp_host'. If this option + is set to false, TLS will not be used. +* `notif_from`: defines the "From" address to use when sending emails. + It must be set if email sending is enabled. The placeholder '%(app)s' will be replaced by the application name, + which is normally set in `app_name`, but may be overridden by the + Matrix client application. Note that the placeholder must be written '%(app)s', including the + trailing 's'. +* `app_name`: `app_name` defines the default value for '%(app)s' in `notif_from` and email + subjects. It defaults to 'Matrix'. +* `enable_notifs`: Set to true to enable sending emails for messages that the user + has missed. Disabled by default. +* `notif_for_new_users`: Set to false to disable automatic subscription to email + notifications for new users. Enabled by default. +* `client_base_url`: Custom URL for client links within the email notifications. By default + links will be based on "https://matrix.to". (This setting used to be called `riot_base_url`; + the old name is still supported for backwards-compatibility but is now deprecated.) +* `validation_token_lifetime`: Configures the time that a validation email will expire after sending. + Defaults to 1h. +* `invite_client_location`: The web client location to direct users to during an invite. This is passed + to the identity server as the `org.matrix.web_client_location` key. Defaults + to unset, giving no guidance to the identity server. +* `subjects`: Subjects to use when sending emails from Synapse. The placeholder '%(app)s' will + be replaced with the value of the `app_name` setting, or by a value dictated by the Matrix client application. + In addition, each subject can use the following placeholders: '%(person)s', which will be replaced by the displayname + of the user(s) that sent the message(s), e.g. "Alice and Bob", and '%(room)s', which will be replaced by the name of the room the + message(s) have been sent to, e.g. "My super room". 
In addition, emails related to account administration
+  can use the '%(server_name)s' placeholder, which will be replaced by the value of the
+  `server_name` setting in your Synapse configuration.
+
+  Here is a list of subjects for notification emails that can be set:
+    * `message_from_person_in_room`: Subject to use to notify about one message from one or more user(s) in a
+       room which has a name. Defaults to "[%(app)s] You have a message on %(app)s from %(person)s in the %(room)s room..."
+    * `message_from_person`: Subject to use to notify about one message from one or more user(s) in a
+       room which doesn't have a name. Defaults to "[%(app)s] You have a message on %(app)s from %(person)s..."
+    * `messages_from_person`: Subject to use to notify about multiple messages from one or more users in
+       a room which doesn't have a name. Defaults to "[%(app)s] You have messages on %(app)s from %(person)s..."
+    * `messages_in_room`: Subject to use to notify about multiple messages in a room which has a
+       name. Defaults to "[%(app)s] You have messages on %(app)s in the %(room)s room..."
+    * `messages_in_room_and_others`: Subject to use to notify about multiple messages in multiple rooms.
+       Defaults to "[%(app)s] You have messages on %(app)s in the %(room)s room and others..."
+    * `messages_from_person_and_others`: Subject to use to notify about multiple messages from multiple persons in
+       multiple rooms. This is similar to the setting above except it's used when
+       the room in which the notification was triggered has no name. Defaults to
+       "[%(app)s] You have messages on %(app)s from %(person)s and others..."
+    * `invite_from_person_to_room`: Subject to use to notify about an invite to a room which has a name.
+       Defaults to "[%(app)s] %(person)s has invited you to join the %(room)s room on %(app)s..."
+    * `invite_from_person`: Subject to use to notify about an invite to a room which doesn't have a
+       name. Defaults to "[%(app)s] %(person)s has invited you to chat on %(app)s..."
+    * `password_reset`: Subject to use when sending a password reset email. Defaults to "[%(server_name)s] Password reset"
+    * `email_validation`: Subject to use when sending a verification email to assert an address's
+       ownership. Defaults to "[%(server_name)s] Validate your email"
+
+Example configuration:
+
+```yaml
+email:
+  smtp_host: mail.server
+  smtp_port: 587
+  smtp_user: "exampleusername"
+  smtp_pass: "examplepassword"
+  force_tls: true
+  require_transport_security: true
+  enable_tls: false
+  notif_from: "Your Friendly %(app)s homeserver <noreply@example.com>"
+  app_name: my_branded_matrix_server
+  enable_notifs: true
+  notif_for_new_users: false
+  client_base_url: "http://localhost/riot"
+  validation_token_lifetime: 15m
+  invite_client_location: https://app.element.io
+
+  subjects:
+    message_from_person_in_room: "[%(app)s] You have a message on %(app)s from %(person)s in the %(room)s room..."
+    message_from_person: "[%(app)s] You have a message on %(app)s from %(person)s..."
+    messages_from_person: "[%(app)s] You have messages on %(app)s from %(person)s..."
+    messages_in_room: "[%(app)s] You have messages on %(app)s in the %(room)s room..."
+    messages_in_room_and_others: "[%(app)s] You have messages on %(app)s in the %(room)s room and others..."
+    messages_from_person_and_others: "[%(app)s] You have messages on %(app)s from %(person)s and others..."
+    invite_from_person_to_room: "[%(app)s] %(person)s has invited you to join the %(room)s room on %(app)s..." 
+    invite_from_person: "[%(app)s] %(person)s has invited you to chat on %(app)s..."
+    password_reset: "[%(server_name)s] Password reset"
+    email_validation: "[%(server_name)s] Validate your email"
+```
 
 ## Homeserver blocking
 Useful options for Synapse admins.
@@ -1212,7 +1321,7 @@ Associated sub-options:
   connection pool. For a reference to valid arguments, see:
     * for [sqlite](https://docs.python.org/3/library/sqlite3.html#sqlite3.connect)
     * for [postgres](https://www.postgresql.org/docs/current/libpq-connect.html#LIBPQ-PARAMKEYWORDS)
-    * for [the connection pool](https://twistedmatrix.com/documents/current/api/twisted.enterprise.adbapi.ConnectionPool.html#__init__)
+    * for [the connection pool](https://docs.twistedmatrix.com/en/stable/api/twisted.enterprise.adbapi.ConnectionPool.html#__init__)
 
 For more information on using Synapse with Postgres,
 see [here](../../postgres.md).
@@ -2514,18 +2623,18 @@ state events are shared with users:
 - `m.room.topic`
 
 To change the default behavior, use the following sub-options:
-* `disable_default_event_types`: boolean. Set to `true` to disable the above 
+* `disable_default_event_types`: boolean. Set to `true` to disable the above
  defaults. If this is enabled, only the event types listed in
  `additional_event_types` are shared. Defaults to `false`.
-* `additional_event_types`: A list of additional state events to include in the 
-  events to be shared. By default, this list is empty (so only the default event 
+* `additional_event_types`: A list of additional state events to include in the
+  events to be shared. By default, this list is empty (so only the default event
  types are shared).
 
  Each entry in this list should be either a single string or a list of two
-  strings. 
+  strings.
  * A standalone string `t` represents all events with type `t` (i.e. with no
    restrictions on state keys).
-  * A pair of strings `[t, s]` represents a single event with type `t` and 
+  * A pair of strings `[t, s]` represents a single event with type `t` and
    state key `s`. The same type can appear in two entries with different state
    keys: in this situation, both state keys are included in prejoin state.
@@ -2944,8 +3053,13 @@ Options for each entry include:
    values are `client_secret_basic` (default), `client_secret_post` and
    `none`.
 
+* `pkce_method`: Whether to use Proof Key for Code Exchange (PKCE) when requesting
+   and exchanging the token. Valid values are: `auto`, `always`, or `never`. Defaults
+   to `auto`, which uses PKCE if supported during metadata discovery. Set to `always`
+   to force enable PKCE, or `never` to force disable PKCE.
+
 * `scopes`: list of scopes to request. This should normally include the "openid"
-   scope. Defaults to ["openid"].
+   scope. Defaults to `["openid"]`.
 
 * `authorization_endpoint`: the oauth2 authorization endpoint. Required if
  provider discovery is disabled.
@@ -2989,17 +3103,35 @@ Options for each entry include:
 
  For the default provider, the following settings are available:
 
+  * `subject_template`: Jinja2 template for a unique identifier for the user.
+    Defaults to `{{ user.sub }}`, which OpenID Connect compliant providers should provide.
+
+    This replaces and overrides `subject_claim`.
+
  * `subject_claim`: name of the claim containing a unique identifier
    for the user. Defaults to 'sub', which OpenID Connect
    compliant providers should provide.
 
+    *Deprecated in Synapse v1.75.0.*
+
+  * `picture_template`: Jinja2 template for a URL for the user's profile picture. 
+ Defaults to `{{ user.picture }}`, which OpenID Connect compliant providers should + provide and has to refer to a direct image file such as PNG, JPEG, or GIF image file. + + This replaces and overrides `picture_claim`. + + Currently only supported in monolithic (single-process) server configurations + where the media repository runs within the Synapse process. + * `picture_claim`: name of the claim containing an url for the user's profile picture. Defaults to 'picture', which OpenID Connect compliant providers should provide and has to refer to a direct image file such as PNG, JPEG, or GIF image file. - + Currently only supported in monolithic (single-process) server configurations where the media repository runs within the Synapse process. + *Deprecated in Synapse v1.75.0.* + * `localpart_template`: Jinja2 template for the localpart of the MXID. If this is not set, the user will be prompted to choose their own username (see the documentation for the `sso_auth_account_details.html` @@ -3259,114 +3391,6 @@ ui_auth: session_timeout: "15s" ``` --- -### `email` - -Configuration for sending emails from Synapse. - -Server admins can configure custom templates for email content. See -[here](../../templates.md) for more information. - -This setting has the following sub-options: -* `smtp_host`: The hostname of the outgoing SMTP server to use. Defaults to 'localhost'. -* `smtp_port`: The port on the mail server for outgoing SMTP. Defaults to 465 if `force_tls` is true, else 25. - - _Changed in Synapse 1.64.0:_ the default port is now aware of `force_tls`. -* `smtp_user` and `smtp_pass`: Username/password for authentication to the SMTP server. By default, no - authentication is attempted. -* `force_tls`: By default, Synapse connects over plain text and then optionally upgrades - to TLS via STARTTLS. If this option is set to true, TLS is used from the start (Implicit TLS), - and the option `require_transport_security` is ignored. - It is recommended to enable this if supported by your mail server. - - _New in Synapse 1.64.0._ -* `require_transport_security`: Set to true to require TLS transport security for SMTP. - By default, Synapse will connect over plain text, and will then switch to - TLS via STARTTLS *if the SMTP server supports it*. If this option is set, - Synapse will refuse to connect unless the server supports STARTTLS. -* `enable_tls`: By default, if the server supports TLS, it will be used, and the server - must present a certificate that is valid for 'smtp_host'. If this option - is set to false, TLS will not be used. -* `notif_from`: defines the "From" address to use when sending emails. - It must be set if email sending is enabled. The placeholder '%(app)s' will be replaced by the application name, - which is normally set in `app_name`, but may be overridden by the - Matrix client application. Note that the placeholder must be written '%(app)s', including the - trailing 's'. -* `app_name`: `app_name` defines the default value for '%(app)s' in `notif_from` and email - subjects. It defaults to 'Matrix'. -* `enable_notifs`: Set to true to enable sending emails for messages that the user - has missed. Disabled by default. -* `notif_for_new_users`: Set to false to disable automatic subscription to email - notifications for new users. Enabled by default. -* `client_base_url`: Custom URL for client links within the email notifications. By default - links will be based on "https://matrix.to". 
(This setting used to be called `riot_base_url`; - the old name is still supported for backwards-compatibility but is now deprecated.) -* `validation_token_lifetime`: Configures the time that a validation email will expire after sending. - Defaults to 1h. -* `invite_client_location`: The web client location to direct users to during an invite. This is passed - to the identity server as the `org.matrix.web_client_location` key. Defaults - to unset, giving no guidance to the identity server. -* `subjects`: Subjects to use when sending emails from Synapse. The placeholder '%(app)s' will - be replaced with the value of the `app_name` setting, or by a value dictated by the Matrix client application. - In addition, each subject can use the following placeholders: '%(person)s', which will be replaced by the displayname - of the user(s) that sent the message(s), e.g. "Alice and Bob", and '%(room)s', which will be replaced by the name of the room the - message(s) have been sent to, e.g. "My super room". In addition, emails related to account administration will - can use the '%(server_name)s' placeholder, which will be replaced by the value of the - `server_name` setting in your Synapse configuration. - - Here is a list of subjects for notification emails that can be set: - * `message_from_person_in_room`: Subject to use to notify about one message from one or more user(s) in a - room which has a name. Defaults to "[%(app)s] You have a message on %(app)s from %(person)s in the %(room)s room..." - * `message_from_person`: Subject to use to notify about one message from one or more user(s) in a - room which doesn't have a name. Defaults to "[%(app)s] You have a message on %(app)s from %(person)s..." - * `messages_from_person`: Subject to use to notify about multiple messages from one or more users in - a room which doesn't have a name. Defaults to "[%(app)s] You have messages on %(app)s from %(person)s..." - * `messages_in_room`: Subject to use to notify about multiple messages in a room which has a - name. Defaults to "[%(app)s] You have messages on %(app)s in the %(room)s room..." - * `messages_in_room_and_others`: Subject to use to notify about multiple messages in multiple rooms. - Defaults to "[%(app)s] You have messages on %(app)s in the %(room)s room and others..." - * `messages_from_person_and_others`: Subject to use to notify about multiple messages from multiple persons in - multiple rooms. This is similar to the setting above except it's used when - the room in which the notification was triggered has no name. Defaults to - "[%(app)s] You have messages on %(app)s from %(person)s and others..." - * `invite_from_person_to_room`: Subject to use to notify about an invite to a room which has a name. - Defaults to "[%(app)s] %(person)s has invited you to join the %(room)s room on %(app)s..." - * `invite_from_person`: Subject to use to notify about an invite to a room which doesn't have a - name. Defaults to "[%(app)s] %(person)s has invited you to chat on %(app)s..." - * `password_reset`: Subject to use when sending a password reset email. Defaults to "[%(server_name)s] Password reset" - * `email_validation`: Subject to use when sending a verification email to assert an address's - ownership. 
Defaults to "[%(server_name)s] Validate your email" - -Example configuration: -```yaml -email: - smtp_host: mail.server - smtp_port: 587 - smtp_user: "exampleusername" - smtp_pass: "examplepassword" - force_tls: true - require_transport_security: true - enable_tls: false - notif_from: "Your Friendly %(app)s homeserver <noreply@example.com>" - app_name: my_branded_matrix_server - enable_notifs: true - notif_for_new_users: false - client_base_url: "http://localhost/riot" - validation_token_lifetime: 15m - invite_client_location: https://app.element.io - - subjects: - message_from_person_in_room: "[%(app)s] You have a message on %(app)s from %(person)s in the %(room)s room..." - message_from_person: "[%(app)s] You have a message on %(app)s from %(person)s..." - messages_from_person: "[%(app)s] You have messages on %(app)s from %(person)s..." - messages_in_room: "[%(app)s] You have messages on %(app)s in the %(room)s room..." - messages_in_room_and_others: "[%(app)s] You have messages on %(app)s in the %(room)s room and others..." - messages_from_person_and_others: "[%(app)s] You have messages on %(app)s from %(person)s and others..." - invite_from_person_to_room: "[%(app)s] %(person)s has invited you to join the %(room)s room on %(app)s..." - invite_from_person: "[%(app)s] %(person)s has invited you to chat on %(app)s..." - password_reset: "[%(server_name)s] Password reset" - email_validation: "[%(server_name)s] Validate your email" -``` ---- ## Push Configuration settings related to push notifications @@ -3841,6 +3865,48 @@ Example configuration: run_background_tasks_on: worker1 ``` --- +### `update_user_directory_from_worker` + +The [worker](../../workers.md#updating-the-user-directory) that is used to +update the user directory. If not provided this defaults to the main process. + +Example configuration: +```yaml +update_user_directory_from_worker: worker1 +``` + +_Added in Synapse 1.59.0._ + +--- +### `notify_appservices_from_worker` + +The [worker](../../workers.md#notifying-application-services) that is used to +send output traffic to Application Services. If not provided this defaults +to the main process. + +Example configuration: +```yaml +notify_appservices_from_worker: worker1 +``` + +_Added in Synapse 1.59.0._ + +--- +### `media_instance_running_background_jobs` + +The [worker](../../workers.md#synapseappmedia_repository) that is used to run +background tasks for media repository. If running multiple media repositories +you must configure a single instance to run the background tasks. If not provided +this defaults to the main process or your single `media_repository` worker. + +Example configuration: +```yaml +media_instance_running_background_jobs: worker1 +``` + +_Added in Synapse 1.16.0._ + +--- ### `redis` Configuration for Redis when using workers. This *must* be enabled when using workers. @@ -3957,7 +4023,7 @@ worker_listeners: ### `worker_daemonize` Specifies whether the worker should be started as a daemon process. -If Synapse is being managed by [systemd](../../systemd-with-workers/README.md), this option +If Synapse is being managed by [systemd](../../systemd-with-workers/), this option must be omitted or set to `false`. Defaults to `false`. diff --git a/docs/workers.md b/docs/workers.md
index 59a6487e0d..bc66f0e1bc 100644 --- a/docs/workers.md +++ b/docs/workers.md
@@ -157,7 +157,7 @@ Finally, you need to start your worker processes. This can be done with either
 `synctl` or your distribution's preferred service manager such as `systemd`. We
 recommend the use of `systemd` where available: for information on setting up
 `systemd` to start synapse workers, see
-[Systemd with Workers](systemd-with-workers). To use `synctl`, see
+[Systemd with Workers](systemd-with-workers/). To use `synctl`, see
 [Using synctl with Workers](synctl_workers.md).
 
@@ -386,7 +386,7 @@ so. It will then pass those events over HTTP replication to any configured event
 persisters (or the main process if none are configured).
 
 Note that `event_creator`s and `event_persister`s are implemented using the same
-[`synapse.app.generic_worker`](#synapse.app.generic_worker).
+[`synapse.app.generic_worker`](#synapseappgeneric_worker).
 
 An example [`stream_writers`](usage/configuration/config_documentation.md#stream_writers)
 configuration with multiple writers:
@@ -465,7 +465,8 @@ An example for a dedicated background worker instance:
 
 You can designate one generic worker to update the user directory.
 
-Specify its name in the shared configuration as follows:
+Specify its name in the [shared configuration](usage/configuration/config_documentation.md#update_user_directory_from_worker)
+as follows:
 
 ```yaml
 update_user_directory_from_worker: worker_name
@@ -490,7 +491,8 @@ worker application type.
 You can designate one generic worker to send output traffic to Application Services.
 Doesn't handle any REST endpoints itself, but you should specify its name in the
-shared configuration as follows:
+[shared configuration](usage/configuration/config_documentation.md#notify_appservices_from_worker)
+as follows:
 
 ```yaml
 notify_appservices_from_worker: worker_name
@@ -502,11 +504,38 @@ after setting this option in the shared configuration!
 This style of configuration supersedes the legacy `synapse.app.appservice`
 worker application type.
 
+#### Push Notifications
+
+You can designate a generic worker to send push notifications to
+a [push gateway](https://spec.matrix.org/v1.5/push-gateway-api/) such as
+[sygnal](https://github.com/matrix-org/sygnal), and to send email notifications.
+
+This will stop the main process from sending push notifications.
+
+The workers responsible for sending push notifications can be defined using the
+[`pusher_instances`](usage/configuration/config_documentation.md#pusher_instances)
+option. For example:
+
+```yaml
+pusher_instances:
+  - pusher_worker1
+  - pusher_worker2
+```
+
+Multiple workers can be added to this list, in which case the work is balanced
+across them. Ensure the main process and all pusher workers are restarted after changing
+this option.
+
+These workers don't need to accept incoming HTTP requests to send push notifications,
+so no additional reverse proxy configuration is required for pusher workers.
+
+This style of configuration supersedes the legacy `synapse.app.pusher`
+worker application type.
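+
+As with other generic workers, each pusher worker needs its own worker
+configuration file. A minimal sketch (an illustration, not part of this change;
+it assumes the main process exposes its replication listener on port 9093, and
+the worker name and file paths are placeholders):
+
+```yaml
+worker_app: synapse.app.generic_worker
+worker_name: pusher_worker1
+
+# The replication listener on the main Synapse process.
+worker_replication_host: 127.0.0.1
+worker_replication_http_port: 9093
+
+worker_log_config: /etc/matrix-synapse/pusher-worker-log.yaml
+```
 
 ### `synapse.app.pusher`
 
 It is likely this option will be deprecated in the future and is not recommended for new
-installations. Instead, [use `synapse.app.generic_worker` with the `pusher_instances`](usage/configuration/config_documentation.md#pusher_instances).
+installations. Instead, [use `synapse.app.generic_worker` with the `pusher_instances`](#push-notifications).
 
 Handles sending push notifications to sygnal and email. Doesn't handle any
 REST endpoints itself, but you should set
@@ -547,7 +576,7 @@ Note this worker cannot be load-balanced: only one instance should be active. 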
### `synapse.app.federation_sender` It is likely this option will be deprecated in the future and not recommended for -new installations. Instead, [use `synapse.app.generic_worker` with the `federation_sender_instances`](usage/configuration/config_documentation.md#federation_sender_instances). +new installations. Instead, [use `synapse.app.generic_worker` with the `federation_sender_instances`](usage/configuration/config_documentation.md#federation_sender_instances). Handles sending federation traffic to other servers. Doesn't handle any REST endpoints itself, but you should set @@ -606,7 +635,9 @@ expose the `media` resource. For example: ``` Note that if running multiple media repositories they must be on the same server -and you must configure a single instance to run the background tasks, e.g.: +and you must specify a single instance to run the background tasks in the +[shared configuration](usage/configuration/config_documentation.md#media_instance_running_background_jobs), +e.g.: ```yaml media_instance_running_background_jobs: "media-repository-1" diff --git a/mypy.ini b/mypy.ini
index 80fbcdfeab..013fbbdfc0 100644 --- a/mypy.ini +++ b/mypy.ini
@@ -36,7 +36,6 @@ exclude = (?x) |tests/api/test_ratelimiting.py |tests/app/test_openid_listener.py |tests/appservice/test_scheduler.py - |tests/crypto/test_keyring.py |tests/events/test_presence_router.py |tests/events/test_utils.py |tests/federation/test_federation_catch_up.py @@ -90,13 +89,16 @@ disallow_untyped_defs = False [mypy-tests.config.*] disallow_untyped_defs = True +[mypy-tests.crypto.*] +disallow_untyped_defs = True + [mypy-tests.federation.transport.test_client] disallow_untyped_defs = True [mypy-tests.handlers.*] disallow_untyped_defs = True -[mypy-tests.metrics.test_background_process_metrics] +[mypy-tests.metrics.*] disallow_untyped_defs = True [mypy-tests.push.test_bulk_push_rule_evaluator] diff --git a/poetry.lock b/poetry.lock
index 9a9a141a14..f148eaf8cf 100644 --- a/poetry.lock +++ b/poetry.lock
@@ -1,16 +1,17 @@ [[package]] name = "attrs" -version = "22.1.0" +version = "22.2.0" description = "Classes Without Boilerplate" category = "main" optional = false -python-versions = ">=3.5" +python-versions = ">=3.6" [package.extras] -dev = ["cloudpickle", "coverage[toml] (>=5.0.2)", "furo", "hypothesis", "mypy (>=0.900,!=0.940)", "pre-commit", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "sphinx", "sphinx-notfound-page", "zope.interface"] -docs = ["furo", "sphinx", "sphinx-notfound-page", "zope.interface"] -tests = ["cloudpickle", "coverage[toml] (>=5.0.2)", "hypothesis", "mypy (>=0.900,!=0.940)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "zope.interface"] -tests-no-zope = ["cloudpickle", "coverage[toml] (>=5.0.2)", "hypothesis", "mypy (>=0.900,!=0.940)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins"] +cov = ["attrs[tests]", "coverage-enable-subprocess", "coverage[toml] (>=5.3)"] +dev = ["attrs[docs,tests]"] +docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier", "zope.interface"] +tests = ["attrs[tests-no-zope]", "zope.interface"] +tests-no-zope = ["cloudpickle", "cloudpickle", "hypothesis", "hypothesis", "mypy (>=0.971,<0.990)", "mypy (>=0.971,<0.990)", "pympler", "pympler", "pytest (>=4.3.0)", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-mypy-plugins", "pytest-xdist[psutil]", "pytest-xdist[psutil]"] [[package]] name = "authlib" @@ -52,7 +53,7 @@ typecheck = ["mypy"] [[package]] name = "black" -version = "22.10.0" +version = "22.12.0" description = "The uncompromising code formatter." category = "dev" optional = false @@ -114,7 +115,7 @@ python-versions = ">=3.6" [[package]] name = "cffi" -version = "1.15.0" +version = "1.15.1" description = "Foreign Function Interface for Python calling C code." category = "main" optional = false @@ -245,47 +246,6 @@ python-versions = ">=3.7" dev = ["Sphinx", "coverage", "flake8", "lxml", "memory-profiler", "mypy (==0.910)", "tox", "xmlschema (>=1.8.0)"] [[package]] -name = "flake8" -version = "5.0.4" -description = "the modular source code checker: pep8 pyflakes and co" -category = "dev" -optional = false -python-versions = ">=3.6.1" - -[package.dependencies] -importlib-metadata = {version = ">=1.1.0,<4.3", markers = "python_version < \"3.8\""} -mccabe = ">=0.7.0,<0.8.0" -pycodestyle = ">=2.9.0,<2.10.0" -pyflakes = ">=2.5.0,<2.6.0" - -[[package]] -name = "flake8-bugbear" -version = "22.12.6" -description = "A plugin for flake8 finding likely bugs and design problems in your program. Contains warnings that don't belong in pyflakes and pycodestyle." -category = "dev" -optional = false -python-versions = ">=3.7" - -[package.dependencies] -attrs = ">=19.2.0" -flake8 = ">=3.0.0" - -[package.extras] -dev = ["coverage", "hypothesis", "hypothesmith (>=0.2)", "pre-commit", "tox"] - -[[package]] -name = "flake8-comprehensions" -version = "3.10.1" -description = "A flake8 plugin to help you write better list/set/dict comprehensions." 
-category = "dev" -optional = false -python-versions = ">=3.7" - -[package.dependencies] -flake8 = ">=3.0,<3.2.0 || >3.2.0" -importlib-metadata = {version = "*", markers = "python_version < \"3.8\""} - -[[package]] name = "frozendict" version = "2.3.4" description = "A simple immutable dictionary" @@ -306,7 +266,7 @@ smmap = ">=3.0.1,<6" [[package]] name = "gitpython" -version = "3.1.29" +version = "3.1.30" description = "GitPython is a python library used to interact with Git repositories" category = "dev" optional = false @@ -353,19 +313,20 @@ python-versions = "*" [[package]] name = "importlib-metadata" -version = "4.2.0" +version = "6.0.0" description = "Read metadata from Python packages" category = "main" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" [package.dependencies] typing-extensions = {version = ">=3.6.4", markers = "python_version < \"3.8\""} zipp = ">=0.5" [package.extras] -docs = ["jaraco.packaging (>=8.2)", "rst.linker (>=1.9)", "sphinx"] -testing = ["flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pep517", "pyfakefs", "pytest (>=4.6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.0.1)", "pytest-flake8", "pytest-mypy"] +docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +perf = ["ipython"] +testing = ["flake8 (<5)", "flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)", "pytest-perf (>=0.9.2)"] [[package]] name = "importlib-resources" @@ -395,11 +356,11 @@ scripts = ["click (>=6.0)", "twisted (>=16.4.0)"] [[package]] name = "isort" -version = "5.10.1" +version = "5.11.4" description = "A Python utility / library to sort Python imports." 
category = "dev" optional = false -python-versions = ">=3.6.1,<4.0" +python-versions = ">=3.7.0" [package.extras] colors = ["colorama (>=0.4.3,<0.5.0)"] @@ -554,14 +515,6 @@ Twisted = ">=15.1.0" dev = ["black (==22.3.0)", "flake8 (==4.0.1)", "isort (==5.9.3)", "ldaptor", "matrix-synapse", "mypy (==0.910)", "tox", "types-setuptools"] [[package]] -name = "mccabe" -version = "0.7.0" -description = "McCabe checker, plugin for flake8" -category = "dev" -optional = false -python-versions = ">=3.6" - -[[package]] name = "msgpack" version = "1.0.4" description = "MessagePack serializer" @@ -668,14 +621,14 @@ python-versions = "*" [[package]] name = "pillow" -version = "9.3.0" +version = "9.4.0" description = "Python Imaging Library (Fork)" category = "main" optional = false python-versions = ">=3.7" [package.extras] -docs = ["furo", "olefile", "sphinx (>=2.4)", "sphinx-copybutton", "sphinx-issues (>=3.0.1)", "sphinx-removed-in", "sphinxext-opengraph"] +docs = ["furo", "olefile", "sphinx (>=2.4)", "sphinx-copybutton", "sphinx-inline-tabs", "sphinx-issues (>=3.0.1)", "sphinx-removed-in", "sphinxext-opengraph"] tests = ["check-manifest", "coverage", "defusedxml", "markdown2", "olefile", "packaging", "pyroma", "pytest", "pytest-cov", "pytest-timeout"] [[package]] @@ -771,14 +724,6 @@ python-versions = "*" pyasn1 = ">=0.4.6,<0.5.0" [[package]] -name = "pycodestyle" -version = "2.9.1" -description = "Python style guide checker" -category = "dev" -optional = false -python-versions = ">=3.6" - -[[package]] name = "pycparser" version = "2.21" description = "C parser in Python" @@ -788,28 +733,20 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" [[package]] name = "pydantic" -version = "1.10.2" +version = "1.10.4" description = "Data validation and settings management using python type hints" category = "main" optional = false python-versions = ">=3.7" [package.dependencies] -typing-extensions = ">=4.1.0" +typing-extensions = ">=4.2.0" [package.extras] dotenv = ["python-dotenv (>=0.10.4)"] email = ["email-validator (>=1.0.3)"] [[package]] -name = "pyflakes" -version = "2.5.0" -description = "passive checker of Python programs" -category = "dev" -optional = false -python-versions = ">=3.6" - -[[package]] name = "pygithub" version = "1.57" description = "Use the full Github API v3" @@ -893,14 +830,14 @@ tests = ["hypothesis (>=3.27.0)", "pytest (>=3.2.1,!=3.3.0)"] [[package]] name = "pyopenssl" -version = "22.1.0" +version = "23.0.0" description = "Python wrapper module around the OpenSSL library" category = "main" optional = false python-versions = ">=3.6" [package.dependencies] -cryptography = ">=38.0.0,<39" +cryptography = ">=38.0.0,<40" [package.extras] docs = ["sphinx (!=5.2.0,!=5.2.0.post0)", "sphinx-rtd-theme"] @@ -1045,6 +982,14 @@ typing-extensions = {version = ">=4.0.0,<5.0", markers = "python_version < \"3.9 jupyter = ["ipywidgets (>=7.5.1,<8.0.0)"] [[package]] +name = "ruff" +version = "0.0.215" +description = "An extremely fast Python linter, written in Rust." 
+category = "dev" +optional = false +python-versions = ">=3.7" + +[[package]] name = "secretstorage" version = "3.3.1" description = "Python bindings to FreeDesktop.org Secret Service API" @@ -1070,7 +1015,7 @@ doc = ["Sphinx", "sphinx-rtd-theme"] [[package]] name = "sentry-sdk" -version = "1.12.0" +version = "1.12.1" description = "Python client for Sentry (https://sentry.io)" category = "main" optional = true @@ -1125,15 +1070,15 @@ tests = ["coverage[toml] (>=5.0.2)", "pytest"] [[package]] name = "setuptools" -version = "65.3.0" +version = "65.5.1" description = "Easily download, build, install, upgrade, and uninstall Python packages" category = "main" optional = false python-versions = ">=3.7" [package.extras] -docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx", "sphinx-favicon", "sphinx-hoverxref (<2)", "sphinx-inline-tabs", "sphinx-notfound-page (==0.8.3)", "sphinx-reredirects", "sphinxcontrib-towncrier"] -testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8 (<5)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "mock", "pip (>=19.1)", "pip-run (>=8.8)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-hoverxref (<2)", "sphinx-inline-tabs", "sphinx-notfound-page (==0.8.3)", "sphinx-reredirects", "sphinxcontrib-towncrier"] +testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8 (<5)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pip-run (>=8.8)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] testing-integration = ["build[virtualenv]", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] [[package]] @@ -1252,7 +1197,7 @@ python-versions = ">= 3.5" [[package]] name = "towncrier" -version = "22.8.0" +version = "22.12.0" description = "Building newsfiles for your project." 
 category = "dev"
 optional = false
@@ -1264,10 +1209,10 @@ click-default-group = "*"
 incremental = "*"
 jinja2 = "*"
 setuptools = "*"
-tomli = "*"
+tomli = {version = "*", markers = "python_version < \"3.11\""}

 [package.extras]
-dev = ["packaging"]
+dev = ["furo", "packaging", "sphinx (>=5)", "twisted"]

 [[package]]
 name = "treq"
@@ -1402,6 +1347,14 @@ types-enum34 = "*"
 types-ipaddress = "*"

 [[package]]
+name = "types-docutils"
+version = "0.19.1.1"
+description = "Typing stubs for docutils"
+category = "dev"
+optional = false
+python-versions = "*"
+
+[[package]]
 name = "types-enum34"
 version = "1.1.8"
 description = "Typing stubs for enum34"
@@ -1435,7 +1388,7 @@ python-versions = "*"

 [[package]]
 name = "types-pillow"
-version = "9.3.0.4"
+version = "9.4.0.0"
 description = "Typing stubs for Pillow"
 category = "dev"
 optional = false
@@ -1470,7 +1423,7 @@ python-versions = "*"

 [[package]]
 name = "types-requests"
-version = "2.28.11.5"
+version = "2.28.11.7"
 description = "Typing stubs for requests"
 category = "dev"
 optional = false
@@ -1481,12 +1434,15 @@ types-urllib3 = "<1.27"

 [[package]]
 name = "types-setuptools"
-version = "65.6.0.2"
+version = "65.6.0.3"
 description = "Typing stubs for setuptools"
 category = "dev"
 optional = false
 python-versions = "*"

+[package.dependencies]
+types-docutils = "*"
+
 [[package]]
 name = "types-urllib3"
 version = "1.26.10"
@@ -1635,12 +1591,12 @@ user-search = ["pyicu"]
 [metadata]
 lock-version = "1.1"
 python-versions = "^3.7.1"
-content-hash = "f20007013f33bc35a01e412c48adc62a936030f3074e06286674c5ad7f44d300"
+content-hash = "53867af07a507c3addd614c828dfb26175f6604398848e84c0ea65980f8a59a2"

 [metadata.files]
 attrs = [
-    {file = "attrs-22.1.0-py2.py3-none-any.whl", hash = "sha256:86efa402f67bf2df34f51a335487cf46b1ec130d02b8d39fd248abfd30da551c"},
-    {file = "attrs-22.1.0.tar.gz", hash = "sha256:29adc2665447e5191d0e7c568fde78b21f9672d344281d0c6e1ab085429b22b6"},
+    {file = "attrs-22.2.0-py3-none-any.whl", hash = "sha256:29e95c7f6778868dbd49170f98f8818f78f3dc5e0e37c0b1f474e3561b240836"},
+    {file = "attrs-22.2.0.tar.gz", hash = "sha256:c9227bfc2f01993c03f68db37d1d15c9690188323c067c641f1a35ca58185f99"},
 ]
 authlib = [
     {file = "Authlib-1.2.0-py2.py3-none-any.whl", hash = "sha256:4ddf4fd6cfa75c9a460b361d4bd9dac71ffda0be879dbe4292a02e92349ad55a"},
@@ -1674,27 +1630,18 @@ bcrypt = [
     {file = "bcrypt-4.0.1.tar.gz", hash = "sha256:27d375903ac8261cfe4047f6709d16f7d18d39b1ec92aaf72af989552a650ebd"},
 ]
 black = [
-    {file = "black-22.10.0-1fixedarch-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:5cc42ca67989e9c3cf859e84c2bf014f6633db63d1cbdf8fdb666dcd9e77e3fa"},
-    {file = "black-22.10.0-1fixedarch-cp311-cp311-macosx_11_0_x86_64.whl", hash = "sha256:5d8f74030e67087b219b032aa33a919fae8806d49c867846bfacde57f43972ef"},
-    {file = "black-22.10.0-1fixedarch-cp37-cp37m-macosx_10_16_x86_64.whl", hash = "sha256:197df8509263b0b8614e1df1756b1dd41be6738eed2ba9e9769f3880c2b9d7b6"},
-    {file = "black-22.10.0-1fixedarch-cp38-cp38-macosx_10_16_x86_64.whl", hash = "sha256:2644b5d63633702bc2c5f3754b1b475378fbbfb481f62319388235d0cd104c2d"},
-    {file = "black-22.10.0-1fixedarch-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:e41a86c6c650bcecc6633ee3180d80a025db041a8e2398dcc059b3afa8382cd4"},
-    {file = "black-22.10.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2039230db3c6c639bd84efe3292ec7b06e9214a2992cd9beb293d639c6402edb"},
-    {file = "black-22.10.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:14ff67aec0a47c424bc99b71005202045dc09270da44a27848d534600ac64fc7"},
-    {file = "black-22.10.0-cp310-cp310-win_amd64.whl", hash = "sha256:819dc789f4498ecc91438a7de64427c73b45035e2e3680c92e18795a839ebb66"},
-    {file = "black-22.10.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5b9b29da4f564ba8787c119f37d174f2b69cdfdf9015b7d8c5c16121ddc054ae"},
-    {file = "black-22.10.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b8b49776299fece66bffaafe357d929ca9451450f5466e997a7285ab0fe28e3b"},
-    {file = "black-22.10.0-cp311-cp311-win_amd64.whl", hash = "sha256:21199526696b8f09c3997e2b4db8d0b108d801a348414264d2eb8eb2532e540d"},
-    {file = "black-22.10.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1e464456d24e23d11fced2bc8c47ef66d471f845c7b7a42f3bd77bf3d1789650"},
-    {file = "black-22.10.0-cp37-cp37m-win_amd64.whl", hash = "sha256:9311e99228ae10023300ecac05be5a296f60d2fd10fff31cf5c1fa4ca4b1988d"},
-    {file = "black-22.10.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:fba8a281e570adafb79f7755ac8721b6cf1bbf691186a287e990c7929c7692ff"},
-    {file = "black-22.10.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:915ace4ff03fdfff953962fa672d44be269deb2eaf88499a0f8805221bc68c87"},
-    {file = "black-22.10.0-cp38-cp38-win_amd64.whl", hash = "sha256:444ebfb4e441254e87bad00c661fe32df9969b2bf224373a448d8aca2132b395"},
-    {file = "black-22.10.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:974308c58d057a651d182208a484ce80a26dac0caef2895836a92dd6ebd725e0"},
-    {file = "black-22.10.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:72ef3925f30e12a184889aac03d77d031056860ccae8a1e519f6cbb742736383"},
-    {file = "black-22.10.0-cp39-cp39-win_amd64.whl", hash = "sha256:432247333090c8c5366e69627ccb363bc58514ae3e63f7fc75c54b1ea80fa7de"},
-    {file = "black-22.10.0-py3-none-any.whl", hash = "sha256:c957b2b4ea88587b46cf49d1dc17681c1e672864fd7af32fc1e9664d572b3458"},
-    {file = "black-22.10.0.tar.gz", hash = "sha256:f513588da599943e0cde4e32cc9879e825d58720d6557062d1098c5ad80080e1"},
+    {file = "black-22.12.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9eedd20838bd5d75b80c9f5487dbcb06836a43833a37846cf1d8c1cc01cef59d"},
+    {file = "black-22.12.0-cp310-cp310-win_amd64.whl", hash = "sha256:159a46a4947f73387b4d83e87ea006dbb2337eab6c879620a3ba52699b1f4351"},
+    {file = "black-22.12.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d30b212bffeb1e252b31dd269dfae69dd17e06d92b87ad26e23890f3efea366f"},
+    {file = "black-22.12.0-cp311-cp311-win_amd64.whl", hash = "sha256:7412e75863aa5c5411886804678b7d083c7c28421210180d67dfd8cf1221e1f4"},
+    {file = "black-22.12.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c116eed0efb9ff870ded8b62fe9f28dd61ef6e9ddd28d83d7d264a38417dcee2"},
+    {file = "black-22.12.0-cp37-cp37m-win_amd64.whl", hash = "sha256:1f58cbe16dfe8c12b7434e50ff889fa479072096d79f0a7f25e4ab8e94cd8350"},
+    {file = "black-22.12.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:77d86c9f3db9b1bf6761244bc0b3572a546f5fe37917a044e02f3166d5aafa7d"},
+    {file = "black-22.12.0-cp38-cp38-win_amd64.whl", hash = "sha256:82d9fe8fee3401e02e79767016b4907820a7dc28d70d137eb397b92ef3cc5bfc"},
+    {file = "black-22.12.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:101c69b23df9b44247bd88e1d7e90154336ac4992502d4197bdac35dd7ee3320"},
+    {file = "black-22.12.0-cp39-cp39-win_amd64.whl", hash = "sha256:559c7a1ba9a006226f09e4916060982fd27334ae1998e7a38b3f33a37f7a2148"},
+    {file = "black-22.12.0-py3-none-any.whl", hash = "sha256:436cc9167dd28040ad90d3b404aec22cedf24a6e4d7de221bec2730ec0c97bcf"},
+    {file = "black-22.12.0.tar.gz", hash = "sha256:229351e5a18ca30f447bf724d007f890f97e13af070bb6ad4c0a441cd7596a2f"},
 ]
 bleach = [
     {file = "bleach-5.0.1-py3-none-any.whl", hash = "sha256:085f7f33c15bd408dd9b17a4ad77c577db66d76203e5984b1bd59baeee948b2a"},
@@ -1709,56 +1656,70 @@ certifi = [
     {file = "certifi-2022.12.7.tar.gz", hash = "sha256:35824b4c3a97115964b408844d64aa14db1cc518f6562e8d7261699d1350a9e3"},
 ]
 cffi = [
-    {file = "cffi-1.15.0-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:c2502a1a03b6312837279c8c1bd3ebedf6c12c4228ddbad40912d671ccc8a962"},
-    {file = "cffi-1.15.0-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:23cfe892bd5dd8941608f93348c0737e369e51c100d03718f108bf1add7bd6d0"},
-    {file = "cffi-1.15.0-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:41d45de54cd277a7878919867c0f08b0cf817605e4eb94093e7516505d3c8d14"},
-    {file = "cffi-1.15.0-cp27-cp27m-win32.whl", hash = "sha256:4a306fa632e8f0928956a41fa8e1d6243c71e7eb59ffbd165fc0b41e316b2474"},
-    {file = "cffi-1.15.0-cp27-cp27m-win_amd64.whl", hash = "sha256:e7022a66d9b55e93e1a845d8c9eba2a1bebd4966cd8bfc25d9cd07d515b33fa6"},
-    {file = "cffi-1.15.0-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:14cd121ea63ecdae71efa69c15c5543a4b5fbcd0bbe2aad864baca0063cecf27"},
-    {file = "cffi-1.15.0-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:d4d692a89c5cf08a8557fdeb329b82e7bf609aadfaed6c0d79f5a449a3c7c023"},
-    {file = "cffi-1.15.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0104fb5ae2391d46a4cb082abdd5c69ea4eab79d8d44eaaf79f1b1fd806ee4c2"},
-    {file = "cffi-1.15.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:91ec59c33514b7c7559a6acda53bbfe1b283949c34fe7440bcf917f96ac0723e"},
-    {file = "cffi-1.15.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:f5c7150ad32ba43a07c4479f40241756145a1f03b43480e058cfd862bf5041c7"},
-    {file = "cffi-1.15.0-cp310-cp310-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:00c878c90cb53ccfaae6b8bc18ad05d2036553e6d9d1d9dbcf323bbe83854ca3"},
-    {file = "cffi-1.15.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:abb9a20a72ac4e0fdb50dae135ba5e77880518e742077ced47eb1499e29a443c"},
-    {file = "cffi-1.15.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a5263e363c27b653a90078143adb3d076c1a748ec9ecc78ea2fb916f9b861962"},
-    {file = "cffi-1.15.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f54a64f8b0c8ff0b64d18aa76675262e1700f3995182267998c31ae974fbc382"},
-    {file = "cffi-1.15.0-cp310-cp310-win32.whl", hash = "sha256:c21c9e3896c23007803a875460fb786118f0cdd4434359577ea25eb556e34c55"},
-    {file = "cffi-1.15.0-cp310-cp310-win_amd64.whl", hash = "sha256:5e069f72d497312b24fcc02073d70cb989045d1c91cbd53979366077959933e0"},
-    {file = "cffi-1.15.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:64d4ec9f448dfe041705426000cc13e34e6e5bb13736e9fd62e34a0b0c41566e"},
-    {file = "cffi-1.15.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2756c88cbb94231c7a147402476be2c4df2f6078099a6f4a480d239a8817ae39"},
-    {file = "cffi-1.15.0-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3b96a311ac60a3f6be21d2572e46ce67f09abcf4d09344c49274eb9e0bf345fc"},
-    {file = "cffi-1.15.0-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:75e4024375654472cc27e91cbe9eaa08567f7fbdf822638be2814ce059f58032"},
-    {file = "cffi-1.15.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:59888172256cac5629e60e72e86598027aca6bf01fa2465bdb676d37636573e8"},
-    {file = "cffi-1.15.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:27c219baf94952ae9d50ec19651a687b826792055353d07648a5695413e0c605"},
-    {file = "cffi-1.15.0-cp36-cp36m-win32.whl", hash = "sha256:4958391dbd6249d7ad855b9ca88fae690783a6be9e86df65865058ed81fc860e"},
-    {file = "cffi-1.15.0-cp36-cp36m-win_amd64.whl", hash = "sha256:f6f824dc3bce0edab5f427efcfb1d63ee75b6fcb7282900ccaf925be84efb0fc"},
-    {file = "cffi-1.15.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:06c48159c1abed75c2e721b1715c379fa3200c7784271b3c46df01383b593636"},
-    {file = "cffi-1.15.0-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:c2051981a968d7de9dd2d7b87bcb9c939c74a34626a6e2f8181455dd49ed69e4"},
-    {file = "cffi-1.15.0-cp37-cp37m-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:fd8a250edc26254fe5b33be00402e6d287f562b6a5b2152dec302fa15bb3e997"},
-    {file = "cffi-1.15.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:91d77d2a782be4274da750752bb1650a97bfd8f291022b379bb8e01c66b4e96b"},
-    {file = "cffi-1.15.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:45db3a33139e9c8f7c09234b5784a5e33d31fd6907800b316decad50af323ff2"},
-    {file = "cffi-1.15.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:263cc3d821c4ab2213cbe8cd8b355a7f72a8324577dc865ef98487c1aeee2bc7"},
-    {file = "cffi-1.15.0-cp37-cp37m-win32.whl", hash = "sha256:17771976e82e9f94976180f76468546834d22a7cc404b17c22df2a2c81db0c66"},
-    {file = "cffi-1.15.0-cp37-cp37m-win_amd64.whl", hash = "sha256:3415c89f9204ee60cd09b235810be700e993e343a408693e80ce7f6a40108029"},
-    {file = "cffi-1.15.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:4238e6dab5d6a8ba812de994bbb0a79bddbdf80994e4ce802b6f6f3142fcc880"},
-    {file = "cffi-1.15.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:0808014eb713677ec1292301ea4c81ad277b6cdf2fdd90fd540af98c0b101d20"},
-    {file = "cffi-1.15.0-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:57e9ac9ccc3101fac9d6014fba037473e4358ef4e89f8e181f8951a2c0162024"},
-    {file = "cffi-1.15.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8b6c2ea03845c9f501ed1313e78de148cd3f6cad741a75d43a29b43da27f2e1e"},
-    {file = "cffi-1.15.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:10dffb601ccfb65262a27233ac273d552ddc4d8ae1bf93b21c94b8511bffe728"},
-    {file = "cffi-1.15.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:786902fb9ba7433aae840e0ed609f45c7bcd4e225ebb9c753aa39725bb3e6ad6"},
-    {file = "cffi-1.15.0-cp38-cp38-win32.whl", hash = "sha256:da5db4e883f1ce37f55c667e5c0de439df76ac4cb55964655906306918e7363c"},
-    {file = "cffi-1.15.0-cp38-cp38-win_amd64.whl", hash = "sha256:181dee03b1170ff1969489acf1c26533710231c58f95534e3edac87fff06c443"},
-    {file = "cffi-1.15.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:45e8636704eacc432a206ac7345a5d3d2c62d95a507ec70d62f23cd91770482a"},
-    {file = "cffi-1.15.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:31fb708d9d7c3f49a60f04cf5b119aeefe5644daba1cd2a0fe389b674fd1de37"},
-    {file = "cffi-1.15.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:6dc2737a3674b3e344847c8686cf29e500584ccad76204efea14f451d4cc669a"},
-    {file = "cffi-1.15.0-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:74fdfdbfdc48d3f47148976f49fab3251e550a8720bebc99bf1483f5bfb5db3e"},
-    {file = "cffi-1.15.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ffaa5c925128e29efbde7301d8ecaf35c8c60ffbcd6a1ffd3a552177c8e5e796"},
-    {file = "cffi-1.15.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3f7d084648d77af029acb79a0ff49a0ad7e9d09057a9bf46596dac9514dc07df"},
-    {file = "cffi-1.15.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ef1f279350da2c586a69d32fc8733092fd32cc8ac95139a00377841f59a3f8d8"},
-    {file = "cffi-1.15.0-cp39-cp39-win32.whl", hash = "sha256:2a23af14f408d53d5e6cd4e3d9a24ff9e05906ad574822a10563efcef137979a"},
-    {file = "cffi-1.15.0-cp39-cp39-win_amd64.whl", hash = "sha256:3773c4d81e6e818df2efbc7dd77325ca0dcb688116050fb2b3011218eda36139"},
-    {file = "cffi-1.15.0.tar.gz", hash = "sha256:920f0d66a896c2d99f0adbb391f990a84091179542c205fa53ce5787aff87954"},
+    {file = "cffi-1.15.1-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:a66d3508133af6e8548451b25058d5812812ec3798c886bf38ed24a98216fab2"},
+    {file = "cffi-1.15.1-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:470c103ae716238bbe698d67ad020e1db9d9dba34fa5a899b5e21577e6d52ed2"},
+    {file = "cffi-1.15.1-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:9ad5db27f9cabae298d151c85cf2bad1d359a1b9c686a275df03385758e2f914"},
+    {file = "cffi-1.15.1-cp27-cp27m-win32.whl", hash = "sha256:b3bbeb01c2b273cca1e1e0c5df57f12dce9a4dd331b4fa1635b8bec26350bde3"},
+    {file = "cffi-1.15.1-cp27-cp27m-win_amd64.whl", hash = "sha256:e00b098126fd45523dd056d2efba6c5a63b71ffe9f2bbe1a4fe1716e1d0c331e"},
+    {file = "cffi-1.15.1-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:d61f4695e6c866a23a21acab0509af1cdfd2c013cf256bbf5b6b5e2695827162"},
+    {file = "cffi-1.15.1-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:ed9cb427ba5504c1dc15ede7d516b84757c3e3d7868ccc85121d9310d27eed0b"},
+    {file = "cffi-1.15.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:39d39875251ca8f612b6f33e6b1195af86d1b3e60086068be9cc053aa4376e21"},
+    {file = "cffi-1.15.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:285d29981935eb726a4399badae8f0ffdff4f5050eaa6d0cfc3f64b857b77185"},
+    {file = "cffi-1.15.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3eb6971dcff08619f8d91607cfc726518b6fa2a9eba42856be181c6d0d9515fd"},
+    {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:21157295583fe8943475029ed5abdcf71eb3911894724e360acff1d61c1d54bc"},
+    {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5635bd9cb9731e6d4a1132a498dd34f764034a8ce60cef4f5319c0541159392f"},
+    {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2012c72d854c2d03e45d06ae57f40d78e5770d252f195b93f581acf3ba44496e"},
+    {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd86c085fae2efd48ac91dd7ccffcfc0571387fe1193d33b6394db7ef31fe2a4"},
+    {file = "cffi-1.15.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:fa6693661a4c91757f4412306191b6dc88c1703f780c8234035eac011922bc01"},
+    {file = "cffi-1.15.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:59c0b02d0a6c384d453fece7566d1c7e6b7bae4fc5874ef2ef46d56776d61c9e"},
+    {file = "cffi-1.15.1-cp310-cp310-win32.whl", hash = "sha256:cba9d6b9a7d64d4bd46167096fc9d2f835e25d7e4c121fb2ddfc6528fb0413b2"},
+    {file = "cffi-1.15.1-cp310-cp310-win_amd64.whl", hash = "sha256:ce4bcc037df4fc5e3d184794f27bdaab018943698f4ca31630bc7f84a7b69c6d"},
+    {file = "cffi-1.15.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3d08afd128ddaa624a48cf2b859afef385b720bb4b43df214f85616922e6a5ac"},
+    {file = "cffi-1.15.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3799aecf2e17cf585d977b780ce79ff0dc9b78d799fc694221ce814c2c19db83"},
+    {file = "cffi-1.15.1-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a591fe9e525846e4d154205572a029f653ada1a78b93697f3b5a8f1f2bc055b9"},
+    {file = "cffi-1.15.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3548db281cd7d2561c9ad9984681c95f7b0e38881201e157833a2342c30d5e8c"},
+    {file = "cffi-1.15.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:91fc98adde3d7881af9b59ed0294046f3806221863722ba7d8d120c575314325"},
+    {file = "cffi-1.15.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:94411f22c3985acaec6f83c6df553f2dbe17b698cc7f8ae751ff2237d96b9e3c"},
+    {file = "cffi-1.15.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:03425bdae262c76aad70202debd780501fabeaca237cdfddc008987c0e0f59ef"},
+    {file = "cffi-1.15.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:cc4d65aeeaa04136a12677d3dd0b1c0c94dc43abac5860ab33cceb42b801c1e8"},
+    {file = "cffi-1.15.1-cp311-cp311-win32.whl", hash = "sha256:a0f100c8912c114ff53e1202d0078b425bee3649ae34d7b070e9697f93c5d52d"},
+    {file = "cffi-1.15.1-cp311-cp311-win_amd64.whl", hash = "sha256:04ed324bda3cda42b9b695d51bb7d54b680b9719cfab04227cdd1e04e5de3104"},
+    {file = "cffi-1.15.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50a74364d85fd319352182ef59c5c790484a336f6db772c1a9231f1c3ed0cbd7"},
+    {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e263d77ee3dd201c3a142934a086a4450861778baaeeb45db4591ef65550b0a6"},
+    {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cec7d9412a9102bdc577382c3929b337320c4c4c4849f2c5cdd14d7368c5562d"},
+    {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4289fc34b2f5316fbb762d75362931e351941fa95fa18789191b33fc4cf9504a"},
+    {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:173379135477dc8cac4bc58f45db08ab45d228b3363adb7af79436135d028405"},
+    {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:6975a3fac6bc83c4a65c9f9fcab9e47019a11d3d2cf7f3c0d03431bf145a941e"},
+    {file = "cffi-1.15.1-cp36-cp36m-win32.whl", hash = "sha256:2470043b93ff09bf8fb1d46d1cb756ce6132c54826661a32d4e4d132e1977adf"},
+    {file = "cffi-1.15.1-cp36-cp36m-win_amd64.whl", hash = "sha256:30d78fbc8ebf9c92c9b7823ee18eb92f2e6ef79b45ac84db507f52fbe3ec4497"},
+    {file = "cffi-1.15.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:198caafb44239b60e252492445da556afafc7d1e3ab7a1fb3f0584ef6d742375"},
+    {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5ef34d190326c3b1f822a5b7a45f6c4535e2f47ed06fec77d3d799c450b2651e"},
+    {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8102eaf27e1e448db915d08afa8b41d6c7ca7a04b7d73af6514df10a3e74bd82"},
+    {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5df2768244d19ab7f60546d0c7c63ce1581f7af8b5de3eb3004b9b6fc8a9f84b"},
+    {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a8c4917bd7ad33e8eb21e9a5bbba979b49d9a97acb3a803092cbc1133e20343c"},
+    {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0e2642fe3142e4cc4af0799748233ad6da94c62a8bec3a6648bf8ee68b1c7426"},
+    {file = "cffi-1.15.1-cp37-cp37m-win32.whl", hash = "sha256:e229a521186c75c8ad9490854fd8bbdd9a0c9aa3a524326b55be83b54d4e0ad9"},
+    {file = "cffi-1.15.1-cp37-cp37m-win_amd64.whl", hash = "sha256:a0b71b1b8fbf2b96e41c4d990244165e2c9be83d54962a9a1d118fd8657d2045"},
+    {file = "cffi-1.15.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:320dab6e7cb2eacdf0e658569d2575c4dad258c0fcc794f46215e1e39f90f2c3"},
+    {file = "cffi-1.15.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e74c6b51a9ed6589199c787bf5f9875612ca4a8a0785fb2d4a84429badaf22a"},
+    {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a5c84c68147988265e60416b57fc83425a78058853509c1b0629c180094904a5"},
+    {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3b926aa83d1edb5aa5b427b4053dc420ec295a08e40911296b9eb1b6170f6cca"},
+    {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:87c450779d0914f2861b8526e035c5e6da0a3199d8f1add1a665e1cbc6fc6d02"},
+    {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f2c9f67e9821cad2e5f480bc8d83b8742896f1242dba247911072d4fa94c192"},
+    {file = "cffi-1.15.1-cp38-cp38-win32.whl", hash = "sha256:8b7ee99e510d7b66cdb6c593f21c043c248537a32e0bedf02e01e9553a172314"},
+    {file = "cffi-1.15.1-cp38-cp38-win_amd64.whl", hash = "sha256:00a9ed42e88df81ffae7a8ab6d9356b371399b91dbdf0c3cb1e84c03a13aceb5"},
+    {file = "cffi-1.15.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:54a2db7b78338edd780e7ef7f9f6c442500fb0d41a5a4ea24fff1c929d5af585"},
+    {file = "cffi-1.15.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:fcd131dd944808b5bdb38e6f5b53013c5aa4f334c5cad0c72742f6eba4b73db0"},
+    {file = "cffi-1.15.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7473e861101c9e72452f9bf8acb984947aa1661a7704553a9f6e4baa5ba64415"},
+    {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c9a799e985904922a4d207a94eae35c78ebae90e128f0c4e521ce339396be9d"},
+    {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3bcde07039e586f91b45c88f8583ea7cf7a0770df3a1649627bf598332cb6984"},
+    {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:33ab79603146aace82c2427da5ca6e58f2b3f2fb5da893ceac0c42218a40be35"},
+    {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5d598b938678ebf3c67377cdd45e09d431369c3b1a5b331058c338e201f12b27"},
+    {file = "cffi-1.15.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:db0fbb9c62743ce59a9ff687eb5f4afbe77e5e8403d6697f7446e5f609976f76"},
+    {file = "cffi-1.15.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:98d85c6a2bef81588d9227dde12db8a7f47f639f4a17c9ae08e773aa9c697bf3"},
+    {file = "cffi-1.15.1-cp39-cp39-win32.whl", hash = "sha256:40f4774f5a9d4f5e344f31a32b5096977b5d48560c5592e2f3d2c4374bd543ee"},
+    {file = "cffi-1.15.1-cp39-cp39-win_amd64.whl", hash = "sha256:70df4e3b545a17496c9b3f41f5115e69a4f2e77e94e1d2a8e1070bc0c38c8a3c"},
+    {file = "cffi-1.15.1.tar.gz", hash = "sha256:d400bfb9a37b1351253cb402671cea7e89bdecc294e8016a707f6d1d8ac934f9"},
 ]
 charset-normalizer = [
     {file = "charset-normalizer-2.0.12.tar.gz", hash = "sha256:2857e29ff0d34db842cd7ca3230549d1a697f96ee6d3fb071cfa6c7393832597"},
@@ -1827,18 +1788,6 @@ elementpath = [
     {file = "elementpath-2.5.0-py3-none-any.whl", hash = "sha256:2a432775e37a19e4362443078130a7dbfc457d7d093cd421c03958d9034cc08b"},
     {file = "elementpath-2.5.0.tar.gz", hash = "sha256:3a27aaf3399929fccda013899cb76d3ff111734abf4281e5f9d3721ba0b9ffa3"},
 ]
-flake8 = [
-    {file = "flake8-5.0.4-py2.py3-none-any.whl", hash = "sha256:7a1cf6b73744f5806ab95e526f6f0d8c01c66d7bbe349562d22dfca20610b248"},
-    {file = "flake8-5.0.4.tar.gz", hash = "sha256:6fbe320aad8d6b95cec8b8e47bc933004678dc63095be98528b7bdd2a9f510db"},
-]
-flake8-bugbear = [
-    {file = "flake8-bugbear-22.12.6.tar.gz", hash = "sha256:4cdb2c06e229971104443ae293e75e64c6107798229202fbe4f4091427a30ac0"},
-    {file = "flake8_bugbear-22.12.6-py3-none-any.whl", hash = "sha256:b69a510634f8a9c298dfda2b18a8036455e6b19ecac4fe582e4d7a0abfa50a30"},
-]
-flake8-comprehensions = [
-    {file = "flake8-comprehensions-3.10.1.tar.gz", hash = "sha256:412052ac4a947f36b891143430fef4859705af11b2572fbb689f90d372cf26ab"},
-    {file = "flake8_comprehensions-3.10.1-py3-none-any.whl", hash = "sha256:d763de3c74bc18a79c039a7ec732e0a1985b0c79309ceb51e56401ad0a2cd44e"},
-]
 frozendict = [
     {file = "frozendict-2.3.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:4a3b32d47282ae0098b9239a6d53ec539da720258bd762d62191b46f2f87c5fc"},
     {file = "frozendict-2.3.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:84c9887179a245a66a50f52afa08d4d92ae0f269839fab82285c70a0fa0dd782"},
@@ -1863,8 +1812,8 @@ gitdb = [
     {file = "gitdb-4.0.9.tar.gz", hash = "sha256:bac2fd45c0a1c9cf619e63a90d62bdc63892ef92387424b855792a6cabe789aa"},
 ]
 gitpython = [
-    {file = "GitPython-3.1.29-py3-none-any.whl", hash = "sha256:41eea0deec2deea139b459ac03656f0dd28fc4a3387240ec1d3c259a2c47850f"},
-    {file = "GitPython-3.1.29.tar.gz", hash = "sha256:cc36bfc4a3f913e66805a28e84703e419d9c264c1077e537b54f0e1af85dbefd"},
+    {file = "GitPython-3.1.30-py3-none-any.whl", hash = "sha256:cd455b0000615c60e286208ba540271af9fe531fa6a87cc590a7298785ab2882"},
+    {file = "GitPython-3.1.30.tar.gz", hash = "sha256:769c2d83e13f5d938b7688479da374c4e3d49f71549aaf462b646db9602ea6f8"},
 ]
 hiredis = [
     {file = "hiredis-2.0.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:b4c8b0bc5841e578d5fb32a16e0c305359b987b850a06964bd5a62739d688048"},
@@ -1982,8 +1931,8 @@ ijson = [
     {file = "ijson-3.1.4.tar.gz", hash = "sha256:1d1003ae3c6115ec9b587d29dd136860a81a23c7626b682e2b5b12c9fd30e4ea"},
 ]
 importlib-metadata = [
-    {file = "importlib_metadata-4.2.0-py3-none-any.whl", hash = "sha256:057e92c15bc8d9e8109738a48db0ccb31b4d9d5cfbee5a8670879a30be66304b"},
-    {file = "importlib_metadata-4.2.0.tar.gz", hash = "sha256:b7e52a1f8dec14a75ea73e0891f3060099ca1d8e6a462a4dff11c3e119ea1b31"},
+    {file = "importlib_metadata-6.0.0-py3-none-any.whl", hash = "sha256:7efb448ec9a5e313a57655d35aa54cd3e01b7e1fbcf72dce1bf06119420f5bad"},
+    {file = "importlib_metadata-6.0.0.tar.gz", hash = "sha256:e354bedeb60efa6affdcc8ae121b73544a7aa74156d047311948f6d711cd378d"},
 ]
 importlib-resources = [
     {file = "importlib_resources-5.4.0-py3-none-any.whl", hash = "sha256:33a95faed5fc19b4bc16b29a6eeae248a3fe69dd55d4d229d2b480e23eeaad45"},
@@ -1994,8 +1943,8 @@ incremental = [
     {file = "incremental-21.3.0.tar.gz", hash = "sha256:02f5de5aff48f6b9f665d99d48bfc7ec03b6e3943210de7cfc88856d755d6f57"},
 ]
 isort = [
-    {file = "isort-5.10.1-py3-none-any.whl", hash = "sha256:6f62d78e2f89b4500b080fe3a81690850cd254227f27f75c3a0c491a1f351ba7"},
-    {file = "isort-5.10.1.tar.gz", hash = "sha256:e8443a5e7a020e9d7f97f1d7d9cd17c88bcb3bc7e218bf9cf5095fe550be2951"},
+    {file = "isort-5.11.4-py3-none-any.whl", hash = "sha256:c033fd0edb91000a7f09527fe5c75321878f98322a77ddcc81adbd83724afb7b"},
+    {file = "isort-5.11.4.tar.gz", hash = "sha256:6db30c5ded9815d813932c04c2f85a360bcdd35fed496f4d8f35495ef0a261b6"},
 ]
 jaeger-client = [
     {file = "jaeger-client-4.8.0.tar.gz", hash = "sha256:3157836edab8e2c209bd2d6ae61113db36f7ee399e66b1dcbb715d87ab49bfe0"},
@@ -2046,6 +1995,7 @@ lxml = [
     {file = "lxml-4.9.2-cp35-cp35m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ca989b91cf3a3ba28930a9fc1e9aeafc2a395448641df1f387a2d394638943b0"},
     {file = "lxml-4.9.2-cp35-cp35m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:822068f85e12a6e292803e112ab876bc03ed1f03dddb80154c395f891ca6b31e"},
     {file = "lxml-4.9.2-cp35-cp35m-win32.whl", hash = "sha256:be7292c55101e22f2a3d4d8913944cbea71eea90792bf914add27454a13905df"},
+    {file = "lxml-4.9.2-cp35-cp35m-win_amd64.whl", hash = "sha256:998c7c41910666d2976928c38ea96a70d1aa43be6fe502f21a651e17483a43c5"},
     {file = "lxml-4.9.2-cp36-cp36m-macosx_10_15_x86_64.whl", hash = "sha256:b26a29f0b7fc6f0897f043ca366142d2b609dc60756ee6e4e90b5f762c6adc53"},
     {file = "lxml-4.9.2-cp36-cp36m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:ab323679b8b3030000f2be63e22cdeea5b47ee0abd2d6a1dc0c8103ddaa56cd7"},
     {file = "lxml-4.9.2-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:689bb688a1db722485e4610a503e3e9210dcc20c520b45ac8f7533c837be76fe"},
@@ -2055,6 +2005,7 @@ lxml = [
     {file = "lxml-4.9.2-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:58bfa3aa19ca4c0f28c5dde0ff56c520fbac6f0daf4fac66ed4c8d2fb7f22e74"},
     {file = "lxml-4.9.2-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:bc718cd47b765e790eecb74d044cc8d37d58562f6c314ee9484df26276d36a38"},
     {file = "lxml-4.9.2-cp36-cp36m-win32.whl", hash = "sha256:d5bf6545cd27aaa8a13033ce56354ed9e25ab0e4ac3b5392b763d8d04b08e0c5"},
+    {file = "lxml-4.9.2-cp36-cp36m-win_amd64.whl", hash = "sha256:3ab9fa9d6dc2a7f29d7affdf3edebf6ece6fb28a6d80b14c3b2fb9d39b9322c3"},
     {file = "lxml-4.9.2-cp37-cp37m-macosx_10_15_x86_64.whl", hash = "sha256:05ca3f6abf5cf78fe053da9b1166e062ade3fa5d4f92b4ed688127ea7d7b1d03"},
     {file = "lxml-4.9.2-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:a5da296eb617d18e497bcf0a5c528f5d3b18dadb3619fbdadf4ed2356ef8d941"},
     {file = "lxml-4.9.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:04876580c050a8c5341d706dd464ff04fd597095cc8c023252566a8826505726"},
@@ -2147,10 +2098,6 @@ matrix-synapse-ldap3 = [
     {file = "matrix-synapse-ldap3-0.2.2.tar.gz", hash = "sha256:b388d95693486eef69adaefd0fd9e84463d52fe17b0214a00efcaa669b73cb74"},
     {file = "matrix_synapse_ldap3-0.2.2-py3-none-any.whl", hash = "sha256:66ee4c85d7952c6c27fd04c09cdfdf4847b8e8b7d6a7ada6ba1100013bda060f"},
 ]
-mccabe = [
-    {file = "mccabe-0.7.0-py2.py3-none-any.whl", hash = "sha256:6c2d30ab6be0e4a46919781807b4f0d834ebdd6c6e3dca0bda5a15f863427b6e"},
-    {file = "mccabe-0.7.0.tar.gz", hash = "sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325"},
-]
 msgpack = [
     {file = "msgpack-1.0.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:4ab251d229d10498e9a2f3b1e68ef64cb393394ec477e3370c457f9430ce9250"},
     {file = "msgpack-1.0.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:112b0f93202d7c0fef0b7810d465fde23c746a2d482e1e2de2aafd2ce1492c88"},
@@ -2263,67 +2210,76 @@ phonenumbers = [
     {file = "phonenumbers-8.13.2.tar.gz", hash = "sha256:0179f688d48c0e7e161eb7b9d86d587940af1f5174f97c1fdfd893c599c0d94a"},
 ]
 pillow = [
-    {file = "Pillow-9.3.0-1-cp37-cp37m-win32.whl", hash = "sha256:e6ea6b856a74d560d9326c0f5895ef8050126acfdc7ca08ad703eb0081e82b74"},
-    {file = "Pillow-9.3.0-1-cp37-cp37m-win_amd64.whl", hash = "sha256:32a44128c4bdca7f31de5be641187367fe2a450ad83b833ef78910397db491aa"},
-    {file = "Pillow-9.3.0-cp310-cp310-macosx_10_10_x86_64.whl", hash = "sha256:0b7257127d646ff8676ec8a15520013a698d1fdc48bc2a79ba4e53df792526f2"},
-    {file = "Pillow-9.3.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:b90f7616ea170e92820775ed47e136208e04c967271c9ef615b6fbd08d9af0e3"},
-    {file = "Pillow-9.3.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:68943d632f1f9e3dce98908e873b3a090f6cba1cbb1b892a9e8d97c938871fbe"},
-    {file = "Pillow-9.3.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:be55f8457cd1eac957af0c3f5ece7bc3f033f89b114ef30f710882717670b2a8"},
-    {file = "Pillow-9.3.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5d77adcd56a42d00cc1be30843d3426aa4e660cab4a61021dc84467123f7a00c"},
-    {file = "Pillow-9.3.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:829f97c8e258593b9daa80638aee3789b7df9da5cf1336035016d76f03b8860c"},
-    {file = "Pillow-9.3.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:801ec82e4188e935c7f5e22e006d01611d6b41661bba9fe45b60e7ac1a8f84de"},
-    {file = "Pillow-9.3.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:871b72c3643e516db4ecf20efe735deb27fe30ca17800e661d769faab45a18d7"},
-    {file = "Pillow-9.3.0-cp310-cp310-win32.whl", hash = "sha256:655a83b0058ba47c7c52e4e2df5ecf484c1b0b0349805896dd350cbc416bdd91"},
-    {file = "Pillow-9.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:9f47eabcd2ded7698106b05c2c338672d16a6f2a485e74481f524e2a23c2794b"},
-    {file = "Pillow-9.3.0-cp311-cp311-macosx_10_10_x86_64.whl", hash = "sha256:57751894f6618fd4308ed8e0c36c333e2f5469744c34729a27532b3db106ee20"},
-    {file = "Pillow-9.3.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7db8b751ad307d7cf238f02101e8e36a128a6cb199326e867d1398067381bff4"},
-    {file = "Pillow-9.3.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3033fbe1feb1b59394615a1cafaee85e49d01b51d54de0cbf6aa8e64182518a1"},
-    {file = "Pillow-9.3.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:22b012ea2d065fd163ca096f4e37e47cd8b59cf4b0fd47bfca6abb93df70b34c"},
-    {file = "Pillow-9.3.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b9a65733d103311331875c1dca05cb4606997fd33d6acfed695b1232ba1df193"},
-    {file = "Pillow-9.3.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:502526a2cbfa431d9fc2a079bdd9061a2397b842bb6bc4239bb176da00993812"},
-    {file = "Pillow-9.3.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:90fb88843d3902fe7c9586d439d1e8c05258f41da473952aa8b328d8b907498c"},
-    {file = "Pillow-9.3.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:89dca0ce00a2b49024df6325925555d406b14aa3efc2f752dbb5940c52c56b11"},
-    {file = "Pillow-9.3.0-cp311-cp311-win32.whl", hash = "sha256:3168434d303babf495d4ba58fc22d6604f6e2afb97adc6a423e917dab828939c"},
-    {file = "Pillow-9.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:18498994b29e1cf86d505edcb7edbe814d133d2232d256db8c7a8ceb34d18cef"},
-    {file = "Pillow-9.3.0-cp37-cp37m-macosx_10_10_x86_64.whl", hash = "sha256:772a91fc0e03eaf922c63badeca75e91baa80fe2f5f87bdaed4280662aad25c9"},
-    {file = "Pillow-9.3.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:afa4107d1b306cdf8953edde0534562607fe8811b6c4d9a486298ad31de733b2"},
-    {file = "Pillow-9.3.0-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b4012d06c846dc2b80651b120e2cdd787b013deb39c09f407727ba90015c684f"},
-    {file = "Pillow-9.3.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:77ec3e7be99629898c9a6d24a09de089fa5356ee408cdffffe62d67bb75fdd72"},
-    {file = "Pillow-9.3.0-cp37-cp37m-manylinux_2_28_aarch64.whl", hash = "sha256:6c738585d7a9961d8c2821a1eb3dcb978d14e238be3d70f0a706f7fa9316946b"},
-    {file = "Pillow-9.3.0-cp37-cp37m-manylinux_2_28_x86_64.whl", hash = "sha256:828989c45c245518065a110434246c44a56a8b2b2f6347d1409c787e6e4651ee"},
-    {file = "Pillow-9.3.0-cp37-cp37m-win32.whl", hash = "sha256:82409ffe29d70fd733ff3c1025a602abb3e67405d41b9403b00b01debc4c9a29"},
-    {file = "Pillow-9.3.0-cp37-cp37m-win_amd64.whl", hash = "sha256:41e0051336807468be450d52b8edd12ac60bebaa97fe10c8b660f116e50b30e4"},
-    {file = "Pillow-9.3.0-cp38-cp38-macosx_10_10_x86_64.whl", hash = "sha256:b03ae6f1a1878233ac620c98f3459f79fd77c7e3c2b20d460284e1fb370557d4"},
-    {file = "Pillow-9.3.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4390e9ce199fc1951fcfa65795f239a8a4944117b5935a9317fb320e7767b40f"},
-    {file = "Pillow-9.3.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:40e1ce476a7804b0fb74bcfa80b0a2206ea6a882938eaba917f7a0f004b42502"},
-    {file = "Pillow-9.3.0-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a0a06a052c5f37b4ed81c613a455a81f9a3a69429b4fd7bb913c3fa98abefc20"},
-    {file = "Pillow-9.3.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:03150abd92771742d4a8cd6f2fa6246d847dcd2e332a18d0c15cc75bf6703040"},
-    {file = "Pillow-9.3.0-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:15c42fb9dea42465dfd902fb0ecf584b8848ceb28b41ee2b58f866411be33f07"},
-    {file = "Pillow-9.3.0-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:51e0e543a33ed92db9f5ef69a0356e0b1a7a6b6a71b80df99f1d181ae5875636"},
-    {file = "Pillow-9.3.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:3dd6caf940756101205dffc5367babf288a30043d35f80936f9bfb37f8355b32"},
-    {file = "Pillow-9.3.0-cp38-cp38-win32.whl", hash = "sha256:f1ff2ee69f10f13a9596480335f406dd1f70c3650349e2be67ca3139280cade0"},
-    {file = "Pillow-9.3.0-cp38-cp38-win_amd64.whl", hash = "sha256:276a5ca930c913f714e372b2591a22c4bd3b81a418c0f6635ba832daec1cbcfc"},
-    {file = "Pillow-9.3.0-cp39-cp39-macosx_10_10_x86_64.whl", hash = "sha256:73bd195e43f3fadecfc50c682f5055ec32ee2c933243cafbfdec69ab1aa87cad"},
-    {file = "Pillow-9.3.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1c7c8ae3864846fc95f4611c78129301e203aaa2af813b703c55d10cc1628535"},
-    {file = "Pillow-9.3.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2e0918e03aa0c72ea56edbb00d4d664294815aa11291a11504a377ea018330d3"},
-    {file = "Pillow-9.3.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b0915e734b33a474d76c28e07292f196cdf2a590a0d25bcc06e64e545f2d146c"},
- {file = "Pillow-9.3.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:af0372acb5d3598f36ec0914deed2a63f6bcdb7b606da04dc19a88d31bf0c05b"}, - {file = "Pillow-9.3.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:ad58d27a5b0262c0c19b47d54c5802db9b34d38bbf886665b626aff83c74bacd"}, - {file = "Pillow-9.3.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:97aabc5c50312afa5e0a2b07c17d4ac5e865b250986f8afe2b02d772567a380c"}, - {file = "Pillow-9.3.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:9aaa107275d8527e9d6e7670b64aabaaa36e5b6bd71a1015ddd21da0d4e06448"}, - {file = "Pillow-9.3.0-cp39-cp39-win32.whl", hash = "sha256:bac18ab8d2d1e6b4ce25e3424f709aceef668347db8637c2296bcf41acb7cf48"}, - {file = "Pillow-9.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:b472b5ea442148d1c3e2209f20f1e0bb0eb556538690fa70b5e1f79fa0ba8dc2"}, - {file = "Pillow-9.3.0-pp37-pypy37_pp73-macosx_10_10_x86_64.whl", hash = "sha256:ab388aaa3f6ce52ac1cb8e122c4bd46657c15905904b3120a6248b5b8b0bc228"}, - {file = "Pillow-9.3.0-pp37-pypy37_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dbb8e7f2abee51cef77673be97760abff1674ed32847ce04b4af90f610144c7b"}, - {file = "Pillow-9.3.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bca31dd6014cb8b0b2db1e46081b0ca7d936f856da3b39744aef499db5d84d02"}, - {file = "Pillow-9.3.0-pp37-pypy37_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:c7025dce65566eb6e89f56c9509d4f628fddcedb131d9465cacd3d8bac337e7e"}, - {file = "Pillow-9.3.0-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:ebf2029c1f464c59b8bdbe5143c79fa2045a581ac53679733d3a91d400ff9efb"}, - {file = "Pillow-9.3.0-pp38-pypy38_pp73-macosx_10_10_x86_64.whl", hash = "sha256:b59430236b8e58840a0dfb4099a0e8717ffb779c952426a69ae435ca1f57210c"}, - {file = "Pillow-9.3.0-pp38-pypy38_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:12ce4932caf2ddf3e41d17fc9c02d67126935a44b86df6a206cf0d7161548627"}, - {file = "Pillow-9.3.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ae5331c23ce118c53b172fa64a4c037eb83c9165aba3a7ba9ddd3ec9fa64a699"}, - {file = "Pillow-9.3.0-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:0b07fffc13f474264c336298d1b4ce01d9c5a011415b79d4ee5527bb69ae6f65"}, - {file = "Pillow-9.3.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:073adb2ae23431d3b9bcbcff3fe698b62ed47211d0716b067385538a1b0f28b8"}, - {file = "Pillow-9.3.0.tar.gz", hash = "sha256:c935a22a557a560108d780f9a0fc426dd7459940dc54faa49d83249c8d3e760f"}, + {file = "Pillow-9.4.0-1-cp310-cp310-macosx_10_10_x86_64.whl", hash = "sha256:1b4b4e9dda4f4e4c4e6896f93e84a8f0bcca3b059de9ddf67dac3c334b1195e1"}, + {file = "Pillow-9.4.0-1-cp311-cp311-macosx_10_10_x86_64.whl", hash = "sha256:fb5c1ad6bad98c57482236a21bf985ab0ef42bd51f7ad4e4538e89a997624e12"}, + {file = "Pillow-9.4.0-1-cp37-cp37m-macosx_10_10_x86_64.whl", hash = "sha256:f0caf4a5dcf610d96c3bd32932bfac8aee61c96e60481c2a0ea58da435e25acd"}, + {file = "Pillow-9.4.0-1-cp38-cp38-macosx_10_10_x86_64.whl", hash = "sha256:3f4cc516e0b264c8d4ccd6b6cbc69a07c6d582d8337df79be1e15a5056b258c9"}, + {file = "Pillow-9.4.0-1-cp39-cp39-macosx_10_10_x86_64.whl", hash = "sha256:b8c2f6eb0df979ee99433d8b3f6d193d9590f735cf12274c108bd954e30ca858"}, + {file = "Pillow-9.4.0-1-pp38-pypy38_pp73-macosx_10_10_x86_64.whl", hash = "sha256:b70756ec9417c34e097f987b4d8c510975216ad26ba6e57ccb53bc758f490dab"}, + {file = "Pillow-9.4.0-1-pp39-pypy39_pp73-macosx_10_10_x86_64.whl", hash = 
"sha256:43521ce2c4b865d385e78579a082b6ad1166ebed2b1a2293c3be1d68dd7ca3b9"}, + {file = "Pillow-9.4.0-cp310-cp310-macosx_10_10_x86_64.whl", hash = "sha256:2968c58feca624bb6c8502f9564dd187d0e1389964898f5e9e1fbc8533169157"}, + {file = "Pillow-9.4.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c5c1362c14aee73f50143d74389b2c158707b4abce2cb055b7ad37ce60738d47"}, + {file = "Pillow-9.4.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bd752c5ff1b4a870b7661234694f24b1d2b9076b8bf337321a814c612665f343"}, + {file = "Pillow-9.4.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9a3049a10261d7f2b6514d35bbb7a4dfc3ece4c4de14ef5876c4b7a23a0e566d"}, + {file = "Pillow-9.4.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:16a8df99701f9095bea8a6c4b3197da105df6f74e6176c5b410bc2df2fd29a57"}, + {file = "Pillow-9.4.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:94cdff45173b1919350601f82d61365e792895e3c3a3443cf99819e6fbf717a5"}, + {file = "Pillow-9.4.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:ed3e4b4e1e6de75fdc16d3259098de7c6571b1a6cc863b1a49e7d3d53e036070"}, + {file = "Pillow-9.4.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d5b2f8a31bd43e0f18172d8ac82347c8f37ef3e0b414431157718aa234991b28"}, + {file = "Pillow-9.4.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:09b89ddc95c248ee788328528e6a2996e09eaccddeeb82a5356e92645733be35"}, + {file = "Pillow-9.4.0-cp310-cp310-win32.whl", hash = "sha256:f09598b416ba39a8f489c124447b007fe865f786a89dbfa48bb5cf395693132a"}, + {file = "Pillow-9.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:f6e78171be3fb7941f9910ea15b4b14ec27725865a73c15277bc39f5ca4f8391"}, + {file = "Pillow-9.4.0-cp311-cp311-macosx_10_10_x86_64.whl", hash = "sha256:3fa1284762aacca6dc97474ee9c16f83990b8eeb6697f2ba17140d54b453e133"}, + {file = "Pillow-9.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:eaef5d2de3c7e9b21f1e762f289d17b726c2239a42b11e25446abf82b26ac132"}, + {file = "Pillow-9.4.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a4dfdae195335abb4e89cc9762b2edc524f3c6e80d647a9a81bf81e17e3fb6f0"}, + {file = "Pillow-9.4.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6abfb51a82e919e3933eb137e17c4ae9c0475a25508ea88993bb59faf82f3b35"}, + {file = "Pillow-9.4.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:451f10ef963918e65b8869e17d67db5e2f4ab40e716ee6ce7129b0cde2876eab"}, + {file = "Pillow-9.4.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:6663977496d616b618b6cfa43ec86e479ee62b942e1da76a2c3daa1c75933ef4"}, + {file = "Pillow-9.4.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:60e7da3a3ad1812c128750fc1bc14a7ceeb8d29f77e0a2356a8fb2aa8925287d"}, + {file = "Pillow-9.4.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:19005a8e58b7c1796bc0167862b1f54a64d3b44ee5d48152b06bb861458bc0f8"}, + {file = "Pillow-9.4.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:f715c32e774a60a337b2bb8ad9839b4abf75b267a0f18806f6f4f5f1688c4b5a"}, + {file = "Pillow-9.4.0-cp311-cp311-win32.whl", hash = "sha256:b222090c455d6d1a64e6b7bb5f4035c4dff479e22455c9eaa1bdd4c75b52c80c"}, + {file = "Pillow-9.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:ba6612b6548220ff5e9df85261bddc811a057b0b465a1226b39bfb8550616aee"}, + {file = "Pillow-9.4.0-cp37-cp37m-macosx_10_10_x86_64.whl", hash = "sha256:5f532a2ad4d174eb73494e7397988e22bf427f91acc8e6ebf5bb10597b49c493"}, + {file = 
"Pillow-9.4.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5dd5a9c3091a0f414a963d427f920368e2b6a4c2f7527fdd82cde8ef0bc7a327"}, + {file = "Pillow-9.4.0-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ef21af928e807f10bf4141cad4746eee692a0dd3ff56cfb25fce076ec3cc8abe"}, + {file = "Pillow-9.4.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:847b114580c5cc9ebaf216dd8c8dbc6b00a3b7ab0131e173d7120e6deade1f57"}, + {file = "Pillow-9.4.0-cp37-cp37m-manylinux_2_28_aarch64.whl", hash = "sha256:653d7fb2df65efefbcbf81ef5fe5e5be931f1ee4332c2893ca638c9b11a409c4"}, + {file = "Pillow-9.4.0-cp37-cp37m-manylinux_2_28_x86_64.whl", hash = "sha256:46f39cab8bbf4a384ba7cb0bc8bae7b7062b6a11cfac1ca4bc144dea90d4a9f5"}, + {file = "Pillow-9.4.0-cp37-cp37m-win32.whl", hash = "sha256:7ac7594397698f77bce84382929747130765f66406dc2cd8b4ab4da68ade4c6e"}, + {file = "Pillow-9.4.0-cp37-cp37m-win_amd64.whl", hash = "sha256:46c259e87199041583658457372a183636ae8cd56dbf3f0755e0f376a7f9d0e6"}, + {file = "Pillow-9.4.0-cp38-cp38-macosx_10_10_x86_64.whl", hash = "sha256:0e51f608da093e5d9038c592b5b575cadc12fd748af1479b5e858045fff955a9"}, + {file = "Pillow-9.4.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:765cb54c0b8724a7c12c55146ae4647e0274a839fb6de7bcba841e04298e1011"}, + {file = "Pillow-9.4.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:519e14e2c49fcf7616d6d2cfc5c70adae95682ae20f0395e9280db85e8d6c4df"}, + {file = "Pillow-9.4.0-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d197df5489004db87d90b918033edbeee0bd6df3848a204bca3ff0a903bef837"}, + {file = "Pillow-9.4.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0845adc64fe9886db00f5ab68c4a8cd933ab749a87747555cec1c95acea64b0b"}, + {file = "Pillow-9.4.0-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:e1339790c083c5a4de48f688b4841f18df839eb3c9584a770cbd818b33e26d5d"}, + {file = "Pillow-9.4.0-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:a96e6e23f2b79433390273eaf8cc94fec9c6370842e577ab10dabdcc7ea0a66b"}, + {file = "Pillow-9.4.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:7cfc287da09f9d2a7ec146ee4d72d6ea1342e770d975e49a8621bf54eaa8f30f"}, + {file = "Pillow-9.4.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d7081c084ceb58278dd3cf81f836bc818978c0ccc770cbbb202125ddabec6628"}, + {file = "Pillow-9.4.0-cp38-cp38-win32.whl", hash = "sha256:df41112ccce5d47770a0c13651479fbcd8793f34232a2dd9faeccb75eb5d0d0d"}, + {file = "Pillow-9.4.0-cp38-cp38-win_amd64.whl", hash = "sha256:7a21222644ab69ddd9967cfe6f2bb420b460dae4289c9d40ff9a4896e7c35c9a"}, + {file = "Pillow-9.4.0-cp39-cp39-macosx_10_10_x86_64.whl", hash = "sha256:0f3269304c1a7ce82f1759c12ce731ef9b6e95b6df829dccd9fe42912cc48569"}, + {file = "Pillow-9.4.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:cb362e3b0976dc994857391b776ddaa8c13c28a16f80ac6522c23d5257156bed"}, + {file = "Pillow-9.4.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a2e0f87144fcbbe54297cae708c5e7f9da21a4646523456b00cc956bd4c65815"}, + {file = "Pillow-9.4.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:28676836c7796805914b76b1837a40f76827ee0d5398f72f7dcc634bae7c6264"}, + {file = "Pillow-9.4.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0884ba7b515163a1a05440a138adeb722b8a6ae2c2b33aea93ea3118dd3a899e"}, + {file = "Pillow-9.4.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = 
"sha256:53dcb50fbdc3fb2c55431a9b30caeb2f7027fcd2aeb501459464f0214200a503"}, + {file = "Pillow-9.4.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:e8c5cf126889a4de385c02a2c3d3aba4b00f70234bfddae82a5eaa3ee6d5e3e6"}, + {file = "Pillow-9.4.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:6c6b1389ed66cdd174d040105123a5a1bc91d0aa7059c7261d20e583b6d8cbd2"}, + {file = "Pillow-9.4.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:0dd4c681b82214b36273c18ca7ee87065a50e013112eea7d78c7a1b89a739153"}, + {file = "Pillow-9.4.0-cp39-cp39-win32.whl", hash = "sha256:6d9dfb9959a3b0039ee06c1a1a90dc23bac3b430842dcb97908ddde05870601c"}, + {file = "Pillow-9.4.0-cp39-cp39-win_amd64.whl", hash = "sha256:54614444887e0d3043557d9dbc697dbb16cfb5a35d672b7a0fcc1ed0cf1c600b"}, + {file = "Pillow-9.4.0-pp38-pypy38_pp73-macosx_10_10_x86_64.whl", hash = "sha256:b9b752ab91e78234941e44abdecc07f1f0d8f51fb62941d32995b8161f68cfe5"}, + {file = "Pillow-9.4.0-pp38-pypy38_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d3b56206244dc8711f7e8b7d6cad4663917cd5b2d950799425076681e8766286"}, + {file = "Pillow-9.4.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aabdab8ec1e7ca7f1434d042bf8b1e92056245fb179790dc97ed040361f16bfd"}, + {file = "Pillow-9.4.0-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:db74f5562c09953b2c5f8ec4b7dfd3f5421f31811e97d1dbc0a7c93d6e3a24df"}, + {file = "Pillow-9.4.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:e9d7747847c53a16a729b6ee5e737cf170f7a16611c143d95aa60a109a59c336"}, + {file = "Pillow-9.4.0-pp39-pypy39_pp73-macosx_10_10_x86_64.whl", hash = "sha256:b52ff4f4e002f828ea6483faf4c4e8deea8d743cf801b74910243c58acc6eda3"}, + {file = "Pillow-9.4.0-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:575d8912dca808edd9acd6f7795199332696d3469665ef26163cd090fa1f8bfa"}, + {file = "Pillow-9.4.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c3c4ed2ff6760e98d262e0cc9c9a7f7b8a9f61aa4d47c58835cdaf7b0b8811bb"}, + {file = "Pillow-9.4.0-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:e621b0246192d3b9cb1dc62c78cfa4c6f6d2ddc0ec207d43c0dedecb914f152a"}, + {file = "Pillow-9.4.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:8f127e7b028900421cad64f51f75c051b628db17fb00e099eb148761eed598c9"}, + {file = "Pillow-9.4.0.tar.gz", hash = "sha256:a1c2d7780448eb93fbcc3789bf3916aa5720d942e37945f4056680317f1cd23e"}, ] pkginfo = [ {file = "pkginfo-1.8.2-py2.py3-none-any.whl", hash = "sha256:c24c487c6a7f72c66e816ab1796b96ac6c3d14d49338293d2141664330b55ffc"}, @@ -2370,55 +2326,47 @@ pyasn1-modules = [ {file = "pyasn1-modules-0.2.8.tar.gz", hash = "sha256:905f84c712230b2c592c19470d3ca8d552de726050d1d1716282a1f6146be65e"}, {file = "pyasn1_modules-0.2.8-py2.py3-none-any.whl", hash = "sha256:a50b808ffeb97cb3601dd25981f6b016cbb3d31fbf57a8b8a87428e6158d0c74"}, ] -pycodestyle = [ - {file = "pycodestyle-2.9.1-py2.py3-none-any.whl", hash = "sha256:d1735fc58b418fd7c5f658d28d943854f8a849b01a5d0a1e6f3f3fdd0166804b"}, - {file = "pycodestyle-2.9.1.tar.gz", hash = "sha256:2c9607871d58c76354b697b42f5d57e1ada7d261c261efac224b664affdc5785"}, -] pycparser = [ {file = "pycparser-2.21-py2.py3-none-any.whl", hash = "sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9"}, {file = "pycparser-2.21.tar.gz", hash = "sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206"}, ] pydantic = [ - {file = "pydantic-1.10.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = 
"sha256:bb6ad4489af1bac6955d38ebcb95079a836af31e4c4f74aba1ca05bb9f6027bd"}, - {file = "pydantic-1.10.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a1f5a63a6dfe19d719b1b6e6106561869d2efaca6167f84f5ab9347887d78b98"}, - {file = "pydantic-1.10.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:352aedb1d71b8b0736c6d56ad2bd34c6982720644b0624462059ab29bd6e5912"}, - {file = "pydantic-1.10.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:19b3b9ccf97af2b7519c42032441a891a5e05c68368f40865a90eb88833c2559"}, - {file = "pydantic-1.10.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e9069e1b01525a96e6ff49e25876d90d5a563bc31c658289a8772ae186552236"}, - {file = "pydantic-1.10.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:355639d9afc76bcb9b0c3000ddcd08472ae75318a6eb67a15866b87e2efa168c"}, - {file = "pydantic-1.10.2-cp310-cp310-win_amd64.whl", hash = "sha256:ae544c47bec47a86bc7d350f965d8b15540e27e5aa4f55170ac6a75e5f73b644"}, - {file = "pydantic-1.10.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a4c805731c33a8db4b6ace45ce440c4ef5336e712508b4d9e1aafa617dc9907f"}, - {file = "pydantic-1.10.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d49f3db871575e0426b12e2f32fdb25e579dea16486a26e5a0474af87cb1ab0a"}, - {file = "pydantic-1.10.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:37c90345ec7dd2f1bcef82ce49b6235b40f282b94d3eec47e801baf864d15525"}, - {file = "pydantic-1.10.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7b5ba54d026c2bd2cb769d3468885f23f43710f651688e91f5fb1edcf0ee9283"}, - {file = "pydantic-1.10.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:05e00dbebbe810b33c7a7362f231893183bcc4251f3f2ff991c31d5c08240c42"}, - {file = "pydantic-1.10.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:2d0567e60eb01bccda3a4df01df677adf6b437958d35c12a3ac3e0f078b0ee52"}, - {file = "pydantic-1.10.2-cp311-cp311-win_amd64.whl", hash = "sha256:c6f981882aea41e021f72779ce2a4e87267458cc4d39ea990729e21ef18f0f8c"}, - {file = "pydantic-1.10.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c4aac8e7103bf598373208f6299fa9a5cfd1fc571f2d40bf1dd1955a63d6eeb5"}, - {file = "pydantic-1.10.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:81a7b66c3f499108b448f3f004801fcd7d7165fb4200acb03f1c2402da73ce4c"}, - {file = "pydantic-1.10.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bedf309630209e78582ffacda64a21f96f3ed2e51fbf3962d4d488e503420254"}, - {file = "pydantic-1.10.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:9300fcbebf85f6339a02c6994b2eb3ff1b9c8c14f502058b5bf349d42447dcf5"}, - {file = "pydantic-1.10.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:216f3bcbf19c726b1cc22b099dd409aa371f55c08800bcea4c44c8f74b73478d"}, - {file = "pydantic-1.10.2-cp37-cp37m-win_amd64.whl", hash = "sha256:dd3f9a40c16daf323cf913593083698caee97df2804aa36c4b3175d5ac1b92a2"}, - {file = "pydantic-1.10.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b97890e56a694486f772d36efd2ba31612739bc6f3caeee50e9e7e3ebd2fdd13"}, - {file = "pydantic-1.10.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:9cabf4a7f05a776e7793e72793cd92cc865ea0e83a819f9ae4ecccb1b8aa6116"}, - {file = "pydantic-1.10.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:06094d18dd5e6f2bbf93efa54991c3240964bb663b87729ac340eb5014310624"}, - {file = 
"pydantic-1.10.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cc78cc83110d2f275ec1970e7a831f4e371ee92405332ebfe9860a715f8336e1"}, - {file = "pydantic-1.10.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:1ee433e274268a4b0c8fde7ad9d58ecba12b069a033ecc4645bb6303c062d2e9"}, - {file = "pydantic-1.10.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:7c2abc4393dea97a4ccbb4ec7d8658d4e22c4765b7b9b9445588f16c71ad9965"}, - {file = "pydantic-1.10.2-cp38-cp38-win_amd64.whl", hash = "sha256:0b959f4d8211fc964772b595ebb25f7652da3f22322c007b6fed26846a40685e"}, - {file = "pydantic-1.10.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c33602f93bfb67779f9c507e4d69451664524389546bacfe1bee13cae6dc7488"}, - {file = "pydantic-1.10.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:5760e164b807a48a8f25f8aa1a6d857e6ce62e7ec83ea5d5c5a802eac81bad41"}, - {file = "pydantic-1.10.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6eb843dcc411b6a2237a694f5e1d649fc66c6064d02b204a7e9d194dff81eb4b"}, - {file = "pydantic-1.10.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4b8795290deaae348c4eba0cebb196e1c6b98bdbe7f50b2d0d9a4a99716342fe"}, - {file = "pydantic-1.10.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:e0bedafe4bc165ad0a56ac0bd7695df25c50f76961da29c050712596cf092d6d"}, - {file = "pydantic-1.10.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:2e05aed07fa02231dbf03d0adb1be1d79cabb09025dd45aa094aa8b4e7b9dcda"}, - {file = "pydantic-1.10.2-cp39-cp39-win_amd64.whl", hash = "sha256:c1ba1afb396148bbc70e9eaa8c06c1716fdddabaf86e7027c5988bae2a829ab6"}, - {file = "pydantic-1.10.2-py3-none-any.whl", hash = "sha256:1b6ee725bd6e83ec78b1aa32c5b1fa67a3a65badddde3976bca5fe4568f27709"}, - {file = "pydantic-1.10.2.tar.gz", hash = "sha256:91b8e218852ef6007c2b98cd861601c6a09f1aa32bbbb74fab5b1c33d4a1e410"}, -] -pyflakes = [ - {file = "pyflakes-2.5.0-py2.py3-none-any.whl", hash = "sha256:4579f67d887f804e67edb544428f264b7b24f435b263c4614f384135cea553d2"}, - {file = "pyflakes-2.5.0.tar.gz", hash = "sha256:491feb020dca48ccc562a8c0cbe8df07ee13078df59813b83959cbdada312ea3"}, + {file = "pydantic-1.10.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b5635de53e6686fe7a44b5cf25fcc419a0d5e5c1a1efe73d49d48fe7586db854"}, + {file = "pydantic-1.10.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6dc1cc241440ed7ca9ab59d9929075445da6b7c94ced281b3dd4cfe6c8cff817"}, + {file = "pydantic-1.10.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:51bdeb10d2db0f288e71d49c9cefa609bca271720ecd0c58009bd7504a0c464c"}, + {file = "pydantic-1.10.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:78cec42b95dbb500a1f7120bdf95c401f6abb616bbe8785ef09887306792e66e"}, + {file = "pydantic-1.10.4-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:8775d4ef5e7299a2f4699501077a0defdaac5b6c4321173bcb0f3c496fbadf85"}, + {file = "pydantic-1.10.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:572066051eeac73d23f95ba9a71349c42a3e05999d0ee1572b7860235b850cc6"}, + {file = "pydantic-1.10.4-cp310-cp310-win_amd64.whl", hash = "sha256:7feb6a2d401f4d6863050f58325b8d99c1e56f4512d98b11ac64ad1751dc647d"}, + {file = "pydantic-1.10.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:39f4a73e5342b25c2959529f07f026ef58147249f9b7431e1ba8414a36761f53"}, + {file = "pydantic-1.10.4-cp311-cp311-macosx_11_0_arm64.whl", hash = 
"sha256:983e720704431a6573d626b00662eb78a07148c9115129f9b4351091ec95ecc3"}, + {file = "pydantic-1.10.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75d52162fe6b2b55964fbb0af2ee58e99791a3138588c482572bb6087953113a"}, + {file = "pydantic-1.10.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fdf8d759ef326962b4678d89e275ffc55b7ce59d917d9f72233762061fd04a2d"}, + {file = "pydantic-1.10.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:05a81b006be15655b2a1bae5faa4280cf7c81d0e09fcb49b342ebf826abe5a72"}, + {file = "pydantic-1.10.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d88c4c0e5c5dfd05092a4b271282ef0588e5f4aaf345778056fc5259ba098857"}, + {file = "pydantic-1.10.4-cp311-cp311-win_amd64.whl", hash = "sha256:6a05a9db1ef5be0fe63e988f9617ca2551013f55000289c671f71ec16f4985e3"}, + {file = "pydantic-1.10.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:887ca463c3bc47103c123bc06919c86720e80e1214aab79e9b779cda0ff92a00"}, + {file = "pydantic-1.10.4-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fdf88ab63c3ee282c76d652fc86518aacb737ff35796023fae56a65ced1a5978"}, + {file = "pydantic-1.10.4-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a48f1953c4a1d9bd0b5167ac50da9a79f6072c63c4cef4cf2a3736994903583e"}, + {file = "pydantic-1.10.4-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:a9f2de23bec87ff306aef658384b02aa7c32389766af3c5dee9ce33e80222dfa"}, + {file = "pydantic-1.10.4-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:cd8702c5142afda03dc2b1ee6bc358b62b3735b2cce53fc77b31ca9f728e4bc8"}, + {file = "pydantic-1.10.4-cp37-cp37m-win_amd64.whl", hash = "sha256:6e7124d6855b2780611d9f5e1e145e86667eaa3bd9459192c8dc1a097f5e9903"}, + {file = "pydantic-1.10.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0b53e1d41e97063d51a02821b80538053ee4608b9a181c1005441f1673c55423"}, + {file = "pydantic-1.10.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:55b1625899acd33229c4352ce0ae54038529b412bd51c4915349b49ca575258f"}, + {file = "pydantic-1.10.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:301d626a59edbe5dfb48fcae245896379a450d04baeed50ef40d8199f2733b06"}, + {file = "pydantic-1.10.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b6f9d649892a6f54a39ed56b8dfd5e08b5f3be5f893da430bed76975f3735d15"}, + {file = "pydantic-1.10.4-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:d7b5a3821225f5c43496c324b0d6875fde910a1c2933d726a743ce328fbb2a8c"}, + {file = "pydantic-1.10.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:f2f7eb6273dd12472d7f218e1fef6f7c7c2f00ac2e1ecde4db8824c457300416"}, + {file = "pydantic-1.10.4-cp38-cp38-win_amd64.whl", hash = "sha256:4b05697738e7d2040696b0a66d9f0a10bec0efa1883ca75ee9e55baf511909d6"}, + {file = "pydantic-1.10.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a9a6747cac06c2beb466064dda999a13176b23535e4c496c9d48e6406f92d42d"}, + {file = "pydantic-1.10.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:eb992a1ef739cc7b543576337bebfc62c0e6567434e522e97291b251a41dad7f"}, + {file = "pydantic-1.10.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:990406d226dea0e8f25f643b370224771878142155b879784ce89f633541a024"}, + {file = "pydantic-1.10.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:2e82a6d37a95e0b1b42b82ab340ada3963aea1317fd7f888bb6b9dfbf4fff57c"}, + {file = "pydantic-1.10.4-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9193d4f4ee8feca58bc56c8306bcb820f5c7905fd919e0750acdeeeef0615b28"}, + {file = "pydantic-1.10.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:2b3ce5f16deb45c472dde1a0ee05619298c864a20cded09c4edd820e1454129f"}, + {file = "pydantic-1.10.4-cp39-cp39-win_amd64.whl", hash = "sha256:9cbdc268a62d9a98c56e2452d6c41c0263d64a2009aac69246486f01b4f594c4"}, + {file = "pydantic-1.10.4-py3-none-any.whl", hash = "sha256:4948f264678c703f3877d1c8877c4e3b2e12e549c57795107f08cf70c6ec7774"}, + {file = "pydantic-1.10.4.tar.gz", hash = "sha256:b9a3859f24eb4e097502a3be1fb4b2abb79b6103dd9e2e0edb70613a4459a648"}, ] pygithub = [ {file = "PyGithub-1.57-py3-none-any.whl", hash = "sha256:5822febeac2391f1306c55a99af2bc8f86c8bf82ded000030cd02c18f31b731f"}, @@ -2456,8 +2404,8 @@ pynacl = [ {file = "PyNaCl-1.5.0.tar.gz", hash = "sha256:8ac7448f09ab85811607bdd21ec2464495ac8b7c66d146bf545b0f08fb9220ba"}, ] pyopenssl = [ - {file = "pyOpenSSL-22.1.0-py3-none-any.whl", hash = "sha256:b28437c9773bb6c6958628cf9c3bebe585de661dba6f63df17111966363dd15e"}, - {file = "pyOpenSSL-22.1.0.tar.gz", hash = "sha256:7a83b7b272dd595222d672f5ce29aa030f1fb837630ef229f62e72e395ce8968"}, + {file = "pyOpenSSL-23.0.0-py3-none-any.whl", hash = "sha256:df5fc28af899e74e19fccb5510df423581047e10ab6f1f4ba1763ff5fde844c0"}, + {file = "pyOpenSSL-23.0.0.tar.gz", hash = "sha256:c1cc5f86bcacefc84dada7d31175cae1b1518d5f60d3d0bb595a67822a868a6f"}, ] pyrsistent = [ {file = "pyrsistent-0.18.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:df46c854f490f81210870e509818b729db4488e1f30f2a1ce1698b2295a878d1"}, @@ -2560,6 +2508,24 @@ rich = [ {file = "rich-12.6.0-py3-none-any.whl", hash = "sha256:a4eb26484f2c82589bd9a17c73d32a010b1e29d89f1604cd9bf3a2097b81bb5e"}, {file = "rich-12.6.0.tar.gz", hash = "sha256:ba3a3775974105c221d31141f2c116f4fd65c5ceb0698657a11e9f295ec93fd0"}, ] +ruff = [ + {file = "ruff-0.0.215-py3-none-macosx_10_7_x86_64.whl", hash = "sha256:a4963613bca6ffc448deca1ce3a3fc69af216d6234e5d7f256935d7407088724"}, + {file = "ruff-0.0.215-py3-none-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:7bcd7b07a88c6530bb4e80850d6cf261081b9d4147eb0ea91fbb85a332ba4fe6"}, + {file = "ruff-0.0.215-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bf3fcbf717a1e0c480b3d1fe9fd823043af463f067ec896746dab2123c4dcf10"}, + {file = "ruff-0.0.215-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8199acc4a20d2b3761c4489171f45f37654f2d5ce096361221ea392f078b4be0"}, + {file = "ruff-0.0.215-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2f68624344209d07000aba115eeac551f362e278970112f0b69838c70f77f7df"}, + {file = "ruff-0.0.215-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:f3c846df8a83445c394e6be58b8e784ec8fc82d67de94f137026c43e6037958b"}, + {file = "ruff-0.0.215-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:65b13019821af35a3225a64f2c93877c1e8059b92bb13fce32281ceefeecd199"}, + {file = "ruff-0.0.215-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2a02cb67a7171418c5a90ad0d8f983b5fd29b321c9861e0164d126cda4869c61"}, + {file = "ruff-0.0.215-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:07d69e654d977842c327f26487ef9b7dba39204b113619d33b4139bd3fdd101c"}, + {file = "ruff-0.0.215-py3-none-musllinux_1_2_aarch64.whl", hash = 
"sha256:ed2a0e13c822f8f0c40e6fe6172ff9c88add55a1dac9e0c05315618f82375648"}, + {file = "ruff-0.0.215-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:1fd4b80bf34e20d18b01bf6d981973975184a85ed39f64934e11d00e2aba882f"}, + {file = "ruff-0.0.215-py3-none-musllinux_1_2_i686.whl", hash = "sha256:5360b476b8720fa76d9dd6ee980c563b930a08524c91c99edddb25364ef656d7"}, + {file = "ruff-0.0.215-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:352534c0c2ffd491b331fe5982b1d3e88a6c2083a3c127466d4eed63918f6ea8"}, + {file = "ruff-0.0.215-py3-none-win32.whl", hash = "sha256:af3cd199a0c6f5b90b9c84a2b9b74b202901194b8b00d5d3e28a0a814037b73f"}, + {file = "ruff-0.0.215-py3-none-win_amd64.whl", hash = "sha256:aa6fe5b56b17a04c8db7f60fef21a9ff96109d10d9232b436ae2dfdc6cc70b7c"}, + {file = "ruff-0.0.215.tar.gz", hash = "sha256:a82ab1452396d5ca389bdcb182e8f273c5f7db854022d7a303764b6218e9e77e"}, +] secretstorage = [ {file = "SecretStorage-3.3.1-py3-none-any.whl", hash = "sha256:422d82c36172d88d6a0ed5afdec956514b189ddbfb72fefab0c8a1cee4eaf71f"}, {file = "SecretStorage-3.3.1.tar.gz", hash = "sha256:fd666c51a6bf200643495a04abb261f83229dcb6fd8472ec393df7ffc8b6f195"}, @@ -2569,16 +2535,16 @@ semantic-version = [ {file = "semantic_version-2.10.0.tar.gz", hash = "sha256:bdabb6d336998cbb378d4b9db3a4b56a1e3235701dc05ea2690d9a997ed5041c"}, ] sentry-sdk = [ - {file = "sentry-sdk-1.12.0.tar.gz", hash = "sha256:dc0fe6ef2f77a3853b399c75c97d87be7666098817c1c314f8fcdf68a6865914"}, - {file = "sentry_sdk-1.12.0-py2.py3-none-any.whl", hash = "sha256:3c9bc64025976842c1103cd75d45cff94a7c0cc48fa07770d07a09d2ab8dac30"}, + {file = "sentry-sdk-1.12.1.tar.gz", hash = "sha256:5bbe4b72de22f9ac1e67f2a4e6efe8fbd595bb59b7b223443f50fe5802a5551c"}, + {file = "sentry_sdk-1.12.1-py2.py3-none-any.whl", hash = "sha256:9f0b960694e2d8bb04db4ba6ac2a645040caef4e762c65937998ff06064f10d6"}, ] service-identity = [ {file = "service-identity-21.1.0.tar.gz", hash = "sha256:6e6c6086ca271dc11b033d17c3a8bea9f24ebff920c587da090afc9519419d34"}, {file = "service_identity-21.1.0-py2.py3-none-any.whl", hash = "sha256:f0b0caac3d40627c3c04d7a51b6e06721857a0e10a8775f2d1d7e72901b3a7db"}, ] setuptools = [ - {file = "setuptools-65.3.0-py3-none-any.whl", hash = "sha256:2e24e0bec025f035a2e72cdd1961119f557d78ad331bb00ff82efb2ab8da8e82"}, - {file = "setuptools-65.3.0.tar.gz", hash = "sha256:7732871f4f7fa58fb6bdcaeadb0161b2bd046c85905dbaa066bdcbcc81953b57"}, + {file = "setuptools-65.5.1-py3-none-any.whl", hash = "sha256:d0b9a8433464d5800cbe05094acf5c6d52a91bfac9b52bcfc4d41382be5d5d31"}, + {file = "setuptools-65.5.1.tar.gz", hash = "sha256:e197a19aa8ec9722928f2206f8de752def0e4c9fc6953527360d1c36d94ddb2f"}, ] setuptools-rust = [ {file = "setuptools-rust-1.5.2.tar.gz", hash = "sha256:d8daccb14dc0eae1b6b6eb3ecef79675bd37b4065369f79c35393dd5c55652c7"}, @@ -2721,8 +2687,8 @@ tornado = [ {file = "tornado-6.1.tar.gz", hash = "sha256:33c6e81d7bd55b468d2e793517c909b139960b6c790a60b7991b9b6b76fb9791"}, ] towncrier = [ - {file = "towncrier-22.8.0-py2.py3-none-any.whl", hash = "sha256:3b780c3d966e1b26414830aec3d15000654b31e64e024f3e5fd128b4c6eb8f47"}, - {file = "towncrier-22.8.0.tar.gz", hash = "sha256:7d3839b033859b45fb55df82b74cfd702431933c0cc9f287a5a7ea3e05d042cb"}, + {file = "towncrier-22.12.0-py3-none-any.whl", hash = "sha256:9767a899a4d6856950f3598acd9e8f08da2663c49fdcda5ea0f9e6ba2afc8eea"}, + {file = "towncrier-22.12.0.tar.gz", hash = "sha256:9c49d7e75f646a9aea02ae904c0bc1639c8fd14a01292d2b123b8d307564034d"}, ] treq = [ {file = "treq-22.2.0-py3-none-any.whl", hash = 
"sha256:27d95b07c5c14be3e7b280416139b036087617ad5595be913b1f9b3ce981b9b2"}, @@ -2792,6 +2758,10 @@ types-cryptography = [ {file = "types-cryptography-3.3.15.tar.gz", hash = "sha256:a7983a75a7b88a18f88832008f0ef140b8d1097888ec1a0824ec8fb7e105273b"}, {file = "types_cryptography-3.3.15-py3-none-any.whl", hash = "sha256:d9b0dd5465d7898d400850e7f35e5518aa93a7e23d3e11757cd81b4777089046"}, ] +types-docutils = [ + {file = "types-docutils-0.19.1.1.tar.gz", hash = "sha256:be0a51ba1c7dd215d9d2df66d6845e63c1009b4bbf4c5beb87a0d9745cdba962"}, + {file = "types_docutils-0.19.1.1-py3-none-any.whl", hash = "sha256:a024cada35f0c13cc45eb0b68a102719018a634013690b7fef723bcbfadbd1f1"}, +] types-enum34 = [ {file = "types-enum34-1.1.8.tar.gz", hash = "sha256:6f9c769641d06d73a55e11c14d38ac76fcd37eb545ce79cebb6eec9d50a64110"}, {file = "types_enum34-1.1.8-py3-none-any.whl", hash = "sha256:05058c7a495f6bfaaca0be4aeac3cce5cdd80a2bad2aab01fd49a20bf4a0209d"}, @@ -2809,8 +2779,8 @@ types-opentracing = [ {file = "types_opentracing-2.4.10-py3-none-any.whl", hash = "sha256:66d9cfbbdc4a6f8ca8189a15ad26f0fe41cee84c07057759c5d194e2505b84c2"}, ] types-pillow = [ - {file = "types-Pillow-9.3.0.4.tar.gz", hash = "sha256:c18d466dc18550d96b8b4a279ff94f0cbad696825b5ad55466604f1daf5709de"}, - {file = "types_Pillow-9.3.0.4-py3-none-any.whl", hash = "sha256:98b8484ff343676f6f7051682a6cfd26896e993e86b3ce9badfa0ec8750f5405"}, + {file = "types-Pillow-9.4.0.0.tar.gz", hash = "sha256:ef8a823638ceb765a144a98a2f816b8912da0337c5c2556d33774f1434f9918c"}, + {file = "types_Pillow-9.4.0.0-py3-none-any.whl", hash = "sha256:246f0dc52d575ef64e01f06f41be37a492b542ee3180638a7b874a6dd4d48c01"}, ] types-psycopg2 = [ {file = "types-psycopg2-2.9.21.2.tar.gz", hash = "sha256:bff045579642ce00b4a3c8f2e401b7f96dfaa34939f10be64b0dd3b53feca57d"}, @@ -2825,12 +2795,12 @@ types-pyyaml = [ {file = "types_PyYAML-6.0.12.2-py3-none-any.whl", hash = "sha256:1e94e80aafee07a7e798addb2a320e32956a373f376655128ae20637adb2655b"}, ] types-requests = [ - {file = "types-requests-2.28.11.5.tar.gz", hash = "sha256:a7df37cc6fb6187a84097da951f8e21d335448aa2501a6b0a39cbd1d7ca9ee2a"}, - {file = "types_requests-2.28.11.5-py3-none-any.whl", hash = "sha256:091d4a5a33c1b4f20d8b1b952aa8fa27a6e767c44c3cf65e56580df0b05fd8a9"}, + {file = "types-requests-2.28.11.7.tar.gz", hash = "sha256:0ae38633734990d019b80f5463dfa164ebd3581998ac8435f526da6fe4d598c3"}, + {file = "types_requests-2.28.11.7-py3-none-any.whl", hash = "sha256:b6a2fca8109f4fdba33052f11ed86102bddb2338519e1827387137fefc66a98b"}, ] types-setuptools = [ - {file = "types-setuptools-65.6.0.2.tar.gz", hash = "sha256:ad60ccf01d626de9762224448f36c13e0660e863afd6dc11d979b3739a6c7d24"}, - {file = "types_setuptools-65.6.0.2-py3-none-any.whl", hash = "sha256:2c2b4f756f79778074ce2d21f745aa737b12160d9f8dfa274f47a7287c7a2fee"}, + {file = "types-setuptools-65.6.0.3.tar.gz", hash = "sha256:7ddd7415282fa97ab18e490206067c0cdb126b103743e72ee86783d7af6481c5"}, + {file = "types_setuptools-65.6.0.3-py3-none-any.whl", hash = "sha256:ad729fc3a9a3946f73915eaab16ce56b30ed5ae998479253d809d76b3889ee09"}, ] types-urllib3 = [ {file = "types-urllib3-1.26.10.tar.gz", hash = "sha256:a26898f530e6c3f43f25b907f2b884486868ffd56a9faa94cbf9b3eb6e165d6a"}, diff --git a/pyproject.toml b/pyproject.toml
index 21bc11da88..740d33066e 100644 --- a/pyproject.toml +++ b/pyproject.toml
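Note: the pyproject.toml hunk below replaces the flake8 stack with ruff 0.0.215 and documents the rule codes Synapse opts out of. As a standalone illustration of the B023 check that the config ignores (the names here are invented for the example, not taken from Synapse):

    # B023 flags closures created in a loop that read the loop variable:
    # Python binds it late, so every closure sees the final value.
    callbacks = []
    for n in range(3):
        callbacks.append(lambda: n)
    print([cb() for cb in callbacks])  # [2, 2, 2], usually a surprise

    # Binding the current value as a default argument avoids the trap.
    fixed = [lambda n=n: n for n in range(3)]
    print([cb() for cb in fixed])  # [0, 1, 2]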
@@ -40,6 +40,46 @@ target-version = ['py37', 'py38', 'py39', 'py310'] # https://black.readthedocs.io/en/stable/usage_and_configuration/file_collection_and_discovery.html#gitignore # Use `extend-exclude` if you want to exclude something in addition to this. +[tool.ruff] +line-length = 88 + +# See https://github.com/charliermarsh/ruff/#pycodestyle +# for error codes. The ones we ignore are: +# E731: do not assign a lambda expression, use a def +# E501: Line too long (black enforces this for us) +# +# See https://github.com/charliermarsh/ruff/#pyflakes +# F401: unused import +# F811: Redefinition of unused +# F821: Undefined name +# +# flake8-bugbear compatible checks. Its error codes are described at +# https://github.com/charliermarsh/ruff/#flake8-bugbear +# B019: Use of functools.lru_cache or functools.cache on methods can lead to memory leaks +# B023: Functions defined inside a loop must not use variables redefined in the loop +# B024: Abstract base class with no abstract method. +ignore = [ + "B019", + "B023", + "B024", + "E501", + "E731", + "F401", + "F811", + "F821", +] +select = [ + # pycodestyle checks. + "E", + "W", + # pyflakes checks. + "F", + # flake8-bugbear checks. + "B0", + # flake8-comprehensions checks. + "C4", +] + [tool.isort] line_length = 88 sections = ["FUTURE", "STDLIB", "THIRDPARTY", "TWISTED", "FIRSTPARTY", "TESTS", "LOCALFOLDER"] @@ -57,7 +97,7 @@ manifest-path = "rust/Cargo.toml" [tool.poetry] name = "matrix-synapse" -version = "1.74.0" +version = "1.75.0rc1" description = "Homeserver for the Matrix decentralised comms protocol" authors = ["Matrix.org Team and Contributors <packages@matrix.org>"] license = "Apache-2.0" @@ -136,7 +176,7 @@ Twisted = {extras = ["tls"], version = ">=18.9.0"} treq = ">=15.1" # Twisted has required pyopenssl 16.0 since about Twisted 16.6. pyOpenSSL = ">=16.0.0" -PyYAML = ">=3.11" +PyYAML = ">=3.13" pyasn1 = ">=0.1.9" pyasn1-modules = ">=0.0.7" bcrypt = ">=3.1.7" @@ -274,12 +314,10 @@ all = [ ] [tool.poetry.dev-dependencies] -## We pin black so that our tests don't start failing on new releases. +# We pin black so that our tests don't start failing on new releases. isort = ">=5.10.1" black = ">=22.3.0" -flake8-comprehensions = "*" -flake8-bugbear = ">=21.3.2" -flake8 = "*" +ruff = "0.0.215" # Typechecking mypy = "*" diff --git a/scripts-dev/complement.sh b/scripts-dev/complement.sh
index 8741ba3e34..51d1bac618 100755 --- a/scripts-dev/complement.sh +++ b/scripts-dev/complement.sh
@@ -190,7 +190,7 @@ fi extra_test_args=() -test_tags="synapse_blacklist,msc3787,msc3874" +test_tags="synapse_blacklist,msc3787,msc3874,msc3391" # All environment variables starting with PASS_ will be shared. # (The prefix is stripped off before reaching the container.) diff --git a/scripts-dev/lint.sh b/scripts-dev/lint.sh
index bf900645b1..2bf58ac5d4 100755 --- a/scripts-dev/lint.sh +++ b/scripts-dev/lint.sh
@@ -1,9 +1,8 @@ #!/usr/bin/env bash # # Runs linting scripts over the local Synapse checkout -# isort - sorts import statements # black - opinionated code formatter -# flake8 - lints and finds mistakes +# ruff - lints and finds mistakes set -e @@ -105,6 +104,7 @@ set -x isort "${files[@]}" python3 -m black "${files[@]}" ./scripts-dev/config-lint.sh -flake8 "${files[@]}" +# --quiet suppresses the update check. +ruff --quiet "${files[@]}" ./scripts-dev/check_pydantic_models.py lint mypy diff --git a/stubs/frozendict.pyi b/stubs/frozendict.pyi
index 24c6f3af77..196dee4461 100644 --- a/stubs/frozendict.pyi +++ b/stubs/frozendict.pyi
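Note: this and several other stub files in this merge gain `from __future__ import annotations`. In an ordinary module that import is what makes self-referential annotations usable at runtime, since annotations are stored as strings instead of being evaluated at definition time; a runnable sketch (the class is illustrative, not the stub itself):

    from __future__ import annotations

    class Locale:
        @staticmethod
        def get_default() -> Locale:
            # Without the future import this annotation would raise
            # NameError: "Locale" is not bound while the class body runs.
            return Locale()

    print(Locale.get_default())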
@@ -14,6 +14,8 @@ # Stub for frozendict. +from __future__ import annotations + from typing import Any, Hashable, Iterable, Iterator, Mapping, Tuple, TypeVar, overload _KT = TypeVar("_KT", bound=Hashable) # Key type. diff --git a/stubs/icu.pyi b/stubs/icu.pyi
index efeda7938a..7736df8a92 100644 --- a/stubs/icu.pyi +++ b/stubs/icu.pyi
@@ -14,6 +14,8 @@ # Stub for PyICU. +from __future__ import annotations + class Locale: @staticmethod def getDefault() -> Locale: ... diff --git a/stubs/sortedcontainers/sorteddict.pyi b/stubs/sortedcontainers/sorteddict.pyi
index 7c399ab38d..81f581b034 100644 --- a/stubs/sortedcontainers/sorteddict.pyi +++ b/stubs/sortedcontainers/sorteddict.pyi
@@ -2,6 +2,8 @@ # https://github.com/grantjenks/python-sortedcontainers/blob/eea42df1f7bad2792e8da77335ff888f04b9e5ae/sortedcontainers/sorteddict.pyi # (from https://github.com/grantjenks/python-sortedcontainers/pull/107) +from __future__ import annotations + from typing import ( Any, Callable, diff --git a/stubs/sortedcontainers/sortedlist.pyi b/stubs/sortedcontainers/sortedlist.pyi
index 403897e391..cd4c969849 100644 --- a/stubs/sortedcontainers/sortedlist.pyi +++ b/stubs/sortedcontainers/sortedlist.pyi
@@ -2,6 +2,8 @@ # https://github.com/grantjenks/python-sortedcontainers/blob/a419ffbd2b1c935b09f11f0971696e537fd0c510/sortedcontainers/sortedlist.pyi # (from https://github.com/grantjenks/python-sortedcontainers/pull/107) +from __future__ import annotations + from typing import ( Any, Callable, diff --git a/stubs/sortedcontainers/sortedset.pyi b/stubs/sortedcontainers/sortedset.pyi
index 43c860f422..d761c438f7 100644 --- a/stubs/sortedcontainers/sortedset.pyi +++ b/stubs/sortedcontainers/sortedset.pyi
@@ -2,6 +2,8 @@ # https://github.com/grantjenks/python-sortedcontainers/blob/d0a225d7fd0fb4c54532b8798af3cbeebf97e2d5/sortedcontainers/sortedset.pyi # (from https://github.com/grantjenks/python-sortedcontainers/pull/107) +from __future__ import annotations + from typing import ( AbstractSet, Any, diff --git a/synapse/_scripts/synapse_port_db.py b/synapse/_scripts/synapse_port_db.py
index d850e54e17..c463b60b26 100755 --- a/synapse/_scripts/synapse_port_db.py +++ b/synapse/_scripts/synapse_port_db.py
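Note: the one-line port-script change below switches the SQLite path to a URI filename. With mode=rw the driver opens an existing database read-write but refuses to create one, so a mistyped path fails loudly instead of porting from a silently created empty file. A standalone sketch of the same semantics using the stdlib driver directly (the path is illustrative):

    import sqlite3

    path = "homeserver.db"
    try:
        conn = sqlite3.connect("file:{}?mode=rw".format(path), uri=True)
    except sqlite3.OperationalError as exc:
        # Raised when the file does not exist, rather than creating it.
        print("refusing to create a new database:", exc)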
@@ -1307,7 +1307,7 @@ def main() -> None: sqlite_config = { "name": "sqlite3", "args": { - "database": args.sqlite_database, + "database": "file:{}?mode=rw".format(args.sqlite_database), "cp_min": 1, "cp_max": 1, "check_same_thread": False, diff --git a/synapse/api/filtering.py b/synapse/api/filtering.py
index a9888381b4..2b5af264b4 100644 --- a/synapse/api/filtering.py +++ b/synapse/api/filtering.py
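Note: the filtering hunk below makes an explicit empty allow-list count as "filters everything", alongside the existing "*" deny-list wildcard, and exposes blocks_all_rooms() for sync to short-circuit on. An illustrative reimplementation of just that predicate (not Synapse's Filter class):

    def filters_all_rooms(filter_json: dict) -> bool:
        rooms = filter_json.get("rooms")  # None means "no allow-list given"
        not_rooms = filter_json.get("not_rooms", [])
        return rooms == [] or "*" in not_rooms

    print(filters_all_rooms({"rooms": []}))         # True: nothing is allowed
    print(filters_all_rooms({"not_rooms": ["*"]}))  # True: everything denied
    print(filters_all_rooms({}))                    # False: unfiltered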
@@ -283,6 +283,9 @@ class FilterCollection: await self._room_filter.filter(events) ) + def blocks_all_rooms(self) -> bool: + return self._room_filter.filters_all_rooms() + def blocks_all_presence(self) -> bool: return ( self._presence_filter.filters_all_types() @@ -351,13 +354,13 @@ class Filter: self.not_rel_types = filter_json.get("org.matrix.msc3874.not_rel_types", []) def filters_all_types(self) -> bool: - return "*" in self.not_types + return self.types == [] or "*" in self.not_types def filters_all_senders(self) -> bool: - return "*" in self.not_senders + return self.senders == [] or "*" in self.not_senders def filters_all_rooms(self) -> bool: - return "*" in self.not_rooms + return self.rooms == [] or "*" in self.not_rooms def _check(self, event: FilterEvent) -> bool: """Checks whether the filter matches the given event. @@ -450,8 +453,8 @@ class Filter: if any(map(match_func, disallowed_values)): return False - # Other the event does not match at least one of the allowed values, - # reject it. + # Otherwise if the event does not match at least one of the allowed + # values, reject it. allowed_values = getattr(self, name) if allowed_values is not None: if not any(map(match_func, allowed_values)): diff --git a/synapse/config/_base.pyi b/synapse/config/_base.pyi
index 01ea2b4dab..bd265de536 100644 --- a/synapse/config/_base.pyi +++ b/synapse/config/_base.pyi
@@ -1,3 +1,5 @@ +from __future__ import annotations + import argparse from typing import ( Any, diff --git a/synapse/config/experimental.py b/synapse/config/experimental.py
index 573fa0386f..0f3870bfe1 100644 --- a/synapse/config/experimental.py +++ b/synapse/config/experimental.py
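Note: the hunk below adds the flag that gates all of the MSC3391 (account data deletion) code in this merge. Assuming the usual Synapse configuration layout, an operator would opt in via homeserver.yaml:

    experimental_features:
      msc3391_enabled: true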
@@ -136,3 +136,6 @@ class ExperimentalConfig(Config): # Enable room version (and thus applicable push rules from MSC3931/3932) version_id = RoomVersions.MSC1767v10.identifier KNOWN_ROOM_VERSIONS[version_id] = RoomVersions.MSC1767v10 + + # MSC3391: Removing account data. + self.msc3391_enabled = experimental.get("msc3391_enabled", False) diff --git a/synapse/config/oidc.py b/synapse/config/oidc.py
index 0bd83f4010..df8c422043 100644 --- a/synapse/config/oidc.py +++ b/synapse/config/oidc.py
@@ -117,6 +117,7 @@ OIDC_PROVIDER_CONFIG_SCHEMA = { # to avoid importing authlib here. "enum": ["client_secret_basic", "client_secret_post", "none"], }, + "pkce_method": {"type": "string", "enum": ["auto", "always", "never"]}, "scopes": {"type": "array", "items": {"type": "string"}}, "authorization_endpoint": {"type": "string"}, "token_endpoint": {"type": "string"}, @@ -289,6 +290,7 @@ def _parse_oidc_config_dict( client_secret=oidc_config.get("client_secret"), client_secret_jwt_key=client_secret_jwt_key, client_auth_method=oidc_config.get("client_auth_method", "client_secret_basic"), + pkce_method=oidc_config.get("pkce_method", "auto"), scopes=oidc_config.get("scopes", ["openid"]), authorization_endpoint=oidc_config.get("authorization_endpoint"), token_endpoint=oidc_config.get("token_endpoint"), @@ -357,6 +359,10 @@ class OidcProviderConfig: # 'none'. client_auth_method: str + # Whether to enable PKCE when exchanging the authorization & token. + # Valid values are 'auto', 'always', and 'never'. + pkce_method: str + # list of scopes to request scopes: Collection[str] diff --git a/synapse/handlers/account_data.py b/synapse/handlers/account_data.py
index fc21d58001..aba7315cf7 100644 --- a/synapse/handlers/account_data.py +++ b/synapse/handlers/account_data.py
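Note: in the handler hunks below, "deleting" account data means writing an empty JSON object, and None is returned when there was nothing to delete. A minimal sketch of that convention in isolation (a plain dict stands in for the store, and stream IDs are elided):

    store = {("@alice:example.org", "m.fully_read"): {"event_id": "$abc"}}

    def remove(user_id: str, data_type: str) -> bool:
        key = (user_id, data_type)
        if store.get(key) in (None, {}):
            return False  # no-op: Synapse's handler returns None here
        store[key] = {}   # "deleted" entries live on as empty dicts
        return True

    print(remove("@alice:example.org", "m.fully_read"))  # True
    print(remove("@alice:example.org", "m.fully_read"))  # False, already empty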
@@ -17,10 +17,12 @@ import random from typing import TYPE_CHECKING, Awaitable, Callable, Collection, List, Optional, Tuple from synapse.replication.http.account_data import ( + ReplicationAddRoomAccountDataRestServlet, ReplicationAddTagRestServlet, + ReplicationAddUserAccountDataRestServlet, + ReplicationRemoveRoomAccountDataRestServlet, ReplicationRemoveTagRestServlet, - ReplicationRoomAccountDataRestServlet, - ReplicationUserAccountDataRestServlet, + ReplicationRemoveUserAccountDataRestServlet, ) from synapse.streams import EventSource from synapse.types import JsonDict, StreamKeyType, UserID @@ -41,8 +43,18 @@ class AccountDataHandler: self._instance_name = hs.get_instance_name() self._notifier = hs.get_notifier() - self._user_data_client = ReplicationUserAccountDataRestServlet.make_client(hs) - self._room_data_client = ReplicationRoomAccountDataRestServlet.make_client(hs) + self._add_user_data_client = ( + ReplicationAddUserAccountDataRestServlet.make_client(hs) + ) + self._remove_user_data_client = ( + ReplicationRemoveUserAccountDataRestServlet.make_client(hs) + ) + self._add_room_data_client = ( + ReplicationAddRoomAccountDataRestServlet.make_client(hs) + ) + self._remove_room_data_client = ( + ReplicationRemoveRoomAccountDataRestServlet.make_client(hs) + ) self._add_tag_client = ReplicationAddTagRestServlet.make_client(hs) self._remove_tag_client = ReplicationRemoveTagRestServlet.make_client(hs) self._account_data_writers = hs.config.worker.writers.account_data @@ -112,7 +124,7 @@ class AccountDataHandler: return max_stream_id else: - response = await self._room_data_client( + response = await self._add_room_data_client( instance_name=random.choice(self._account_data_writers), user_id=user_id, room_id=room_id, @@ -121,15 +133,59 @@ class AccountDataHandler: ) return response["max_stream_id"] + async def remove_account_data_for_room( + self, user_id: str, room_id: str, account_data_type: str + ) -> Optional[int]: + """ + Deletes the room account data for the given user and account data type. + + "Deleting" account data merely means setting the content of the account data + to an empty JSON object: {}. + + Args: + user_id: The user ID to remove room account data for. + room_id: The room ID to target. + account_data_type: The account data type to remove. + + Returns: + The maximum stream ID, or None if the room account data item did not exist. + """ + if self._instance_name in self._account_data_writers: + max_stream_id = await self._store.remove_account_data_for_room( + user_id, room_id, account_data_type + ) + if max_stream_id is None: + # The referenced account data did not exist, so no delete occurred. + return None + + self._notifier.on_new_event( + StreamKeyType.ACCOUNT_DATA, max_stream_id, users=[user_id] + ) + + # Notify Synapse modules that the content of the type has changed to an + # empty dictionary. + await self._notify_modules(user_id, room_id, account_data_type, {}) + + return max_stream_id + else: + response = await self._remove_room_data_client( + instance_name=random.choice(self._account_data_writers), + user_id=user_id, + room_id=room_id, + account_data_type=account_data_type, + content={}, + ) + return response["max_stream_id"] + async def add_account_data_for_user( self, user_id: str, account_data_type: str, content: JsonDict ) -> int: """Add some global account_data for a user. Args: - user_id: The user to add a tag for. + user_id: The user to add some account data for. account_data_type: The type of account_data to add. 
- content: A json object to associate with the tag. + content: The content json dictionary. Returns: The maximum stream ID. @@ -148,7 +204,7 @@ class AccountDataHandler: return max_stream_id else: - response = await self._user_data_client( + response = await self._add_user_data_client( instance_name=random.choice(self._account_data_writers), user_id=user_id, account_data_type=account_data_type, @@ -156,6 +212,45 @@ class AccountDataHandler: ) return response["max_stream_id"] + async def remove_account_data_for_user( + self, user_id: str, account_data_type: str + ) -> Optional[int]: + """Removes a piece of global account_data for a user. + + Args: + user_id: The user to remove account data for. + account_data_type: The type of account_data to remove. + + Returns: + The maximum stream ID, or None if the room account data item did not exist. + """ + + if self._instance_name in self._account_data_writers: + max_stream_id = await self._store.remove_account_data_for_user( + user_id, account_data_type + ) + if max_stream_id is None: + # The referenced account data did not exist, so no delete occurred. + return None + + self._notifier.on_new_event( + StreamKeyType.ACCOUNT_DATA, max_stream_id, users=[user_id] + ) + + # Notify Synapse modules that the content of the type has changed to an + # empty dictionary. + await self._notify_modules(user_id, None, account_data_type, {}) + + return max_stream_id + else: + response = await self._remove_user_data_client( + instance_name=random.choice(self._account_data_writers), + user_id=user_id, + account_data_type=account_data_type, + content={}, + ) + return response["max_stream_id"] + async def add_tag_to_room( self, user_id: str, room_id: str, tag: str, content: JsonDict ) -> int: diff --git a/synapse/handlers/device.py b/synapse/handlers/device.py
index 68a0c8ccb4..89864e1119 100644 --- a/synapse/handlers/device.py +++ b/synapse/handlers/device.py
@@ -919,6 +919,11 @@ class DeviceListWorkerUpdater: """ # mark_failed_as_stale is not sent. Ensure this doesn't break expectations. assert mark_failed_as_stale + + if not user_ids: + # Shortcut empty requests + return {} + try: return await self._multi_user_device_resync_client(user_ids=user_ids) except SynapseError as err: @@ -946,6 +951,8 @@ class DeviceListWorkerUpdater: A dict with device info as under the "devices" in the result of this request: https://matrix.org/docs/spec/server_server/r0.1.2#get-matrix-federation-v1-user-devices-userid + None when we weren't able to fetch the device info for some reason, + e.g. due to a connection problem. """ return (await self.multi_user_device_resync([user_id]))[user_id] @@ -1250,6 +1257,8 @@ class DeviceListUpdater(DeviceListWorkerUpdater): - A dict with device info as under the "devices" in the result of this request: https://matrix.org/docs/spec/server_server/r0.1.2#get-matrix-federation-v1-user-devices-userid + None when we weren't able to fetch the device info for some reason, + e.g. due to a connection problem. - True iff the resync failed and the device list should be marked as stale. """ logger.debug("Attempting to resync the device list for %s", user_id) diff --git a/synapse/handlers/oidc.py b/synapse/handlers/oidc.py
index 03de6a4ba6..0fc829acf7 100644 --- a/synapse/handlers/oidc.py +++ b/synapse/handlers/oidc.py
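Note: the OIDC hunks below wire RFC 7636 PKCE (S256 only) into the login flow: the verifier rides in the session cookie and the challenge goes to the provider. A self-contained sketch of the S256 computation, which matches what authlib's create_s256_code_challenge produces:

    import base64
    import hashlib
    import secrets

    # Kept server-side (in Synapse's case, inside the session macaroon).
    code_verifier = secrets.token_urlsafe(48)

    # S256: BASE64URL(SHA256(verifier)) with the padding stripped.
    code_challenge = (
        base64.urlsafe_b64encode(hashlib.sha256(code_verifier.encode("ascii")).digest())
        .rstrip(b"=")
        .decode("ascii")
    )

    # Sent with the authorization request ...
    print({"code_challenge": code_challenge, "code_challenge_method": "S256"})
    # ... while the later token exchange sends code_verifier to prove possession.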
@@ -36,6 +36,7 @@ from authlib.jose import JsonWebToken, JWTClaims from authlib.jose.errors import InvalidClaimError, JoseError, MissingClaimError from authlib.oauth2.auth import ClientAuth from authlib.oauth2.rfc6749.parameters import prepare_grant_uri +from authlib.oauth2.rfc7636.challenge import create_s256_code_challenge from authlib.oidc.core import CodeIDToken, UserInfo from authlib.oidc.discovery import OpenIDProviderMetadata, get_well_known_url from jinja2 import Environment, Template @@ -475,6 +476,16 @@ class OidcProvider: ) ) + # If PKCE support is advertised ensure the wanted method is available. + if m.get("code_challenge_methods_supported") is not None: + m.validate_code_challenge_methods_supported() + if "S256" not in m["code_challenge_methods_supported"]: + raise ValueError( + '"S256" not in "code_challenge_methods_supported" ({supported!r})'.format( + supported=m["code_challenge_methods_supported"], + ) + ) + if m.get("response_types_supported") is not None: m.validate_response_types_supported() @@ -602,6 +613,11 @@ class OidcProvider: if self._config.jwks_uri: metadata["jwks_uri"] = self._config.jwks_uri + if self._config.pkce_method == "always": + metadata["code_challenge_methods_supported"] = ["S256"] + elif self._config.pkce_method == "never": + metadata.pop("code_challenge_methods_supported", None) + self._validate_metadata(metadata) return metadata @@ -653,7 +669,7 @@ class OidcProvider: return jwk_set - async def _exchange_code(self, code: str) -> Token: + async def _exchange_code(self, code: str, code_verifier: str) -> Token: """Exchange an authorization code for a token. This calls the ``token_endpoint`` with the authorization code we @@ -666,6 +682,7 @@ class OidcProvider: Args: code: The authorization code we got from the callback. + code_verifier: The PKCE code verifier to send, blank if unused. Returns: A dict containing various tokens. @@ -696,6 +713,8 @@ class OidcProvider: "code": code, "redirect_uri": self._callback_url, } + if code_verifier: + args["code_verifier"] = code_verifier body = urlencode(args, True) # Fill the body/headers with credentials @@ -914,11 +933,14 @@ class OidcProvider: - ``scope``: the list of scopes set in ``oidc_config.scopes`` - ``state``: a random string - ``nonce``: a random string + - ``code_challenge``: a RFC7636 code challenge (if PKCE is supported) - In addition generating a redirect URL, we are setting a cookie with - a signed macaroon token containing the state, the nonce and the - client_redirect_url params. Those are then checked when the client - comes back from the provider. + In addition to generating a redirect URL, we are setting a cookie with + a signed macaroon token containing the state, the nonce, the + client_redirect_url, and (optionally) the code_verifier params. The state, + nonce, and client_redirect_url are then checked when the client comes back + from the provider. The code_verifier is passed back to the server during + the token exchange and compared to the code_challenge sent in this request. Args: request: the incoming request from the browser. @@ -935,10 +957,25 @@ class OidcProvider: state = generate_token() nonce = generate_token() + code_verifier = "" if not client_redirect_url: client_redirect_url = b"" + metadata = await self.load_metadata() + + # Automatically enable PKCE if it is supported. 
+ extra_grant_values = {} + if metadata.get("code_challenge_methods_supported"): + code_verifier = generate_token(48) + + # Note that we verified the server supports S256 earlier (in + # OidcProvider._validate_metadata). + extra_grant_values = { + "code_challenge_method": "S256", + "code_challenge": create_s256_code_challenge(code_verifier), + } + cookie = self._macaroon_generaton.generate_oidc_session_token( state=state, session_data=OidcSessionData( @@ -946,6 +983,7 @@ class OidcProvider: nonce=nonce, client_redirect_url=client_redirect_url.decode(), ui_auth_session_id=ui_auth_session_id or "", + code_verifier=code_verifier, ), ) @@ -966,7 +1004,6 @@ class OidcProvider: ) ) - metadata = await self.load_metadata() authorization_endpoint = metadata.get("authorization_endpoint") return prepare_grant_uri( authorization_endpoint, @@ -976,6 +1013,7 @@ class OidcProvider: scope=self._scopes, state=state, nonce=nonce, + **extra_grant_values, ) async def handle_oidc_callback( @@ -1003,7 +1041,9 @@ class OidcProvider: # Exchange the code with the provider try: logger.debug("Exchanging OAuth2 code for a token") - token = await self._exchange_code(code) + token = await self._exchange_code( + code, code_verifier=session_data.code_verifier + ) except OidcError as e: logger.warning("Could not exchange OAuth2 code: %s", e) self._sso_handler.render_error(request, e.error, e.error_description) @@ -1520,8 +1560,8 @@ env.filters.update( @attr.s(slots=True, frozen=True, auto_attribs=True) class JinjaOidcMappingConfig: - subject_claim: str - picture_claim: str + subject_template: Template + picture_template: Template localpart_template: Optional[Template] display_name_template: Optional[Template] email_template: Optional[Template] @@ -1540,8 +1580,23 @@ class JinjaOidcMappingProvider(OidcMappingProvider[JinjaOidcMappingConfig]): @staticmethod def parse_config(config: dict) -> JinjaOidcMappingConfig: - subject_claim = config.get("subject_claim", "sub") - picture_claim = config.get("picture_claim", "picture") + def parse_template_config_with_claim( + option_name: str, default_claim: str + ) -> Template: + template_name = f"{option_name}_template" + template = config.get(template_name) + if not template: + # Convert the legacy subject_claim into a template. 
+ claim = config.get(f"{option_name}_claim", default_claim) + template = "{{ user.%s }}" % (claim,) + + try: + return env.from_string(template) + except Exception as e: + raise ConfigError("invalid jinja template", path=[template_name]) from e + + subject_template = parse_template_config_with_claim("subject", "sub") + picture_template = parse_template_config_with_claim("picture", "picture") def parse_template_config(option_name: str) -> Optional[Template]: if option_name not in config: @@ -1574,8 +1629,8 @@ class JinjaOidcMappingProvider(OidcMappingProvider[JinjaOidcMappingConfig]): raise ConfigError("must be a bool", path=["confirm_localpart"]) return JinjaOidcMappingConfig( - subject_claim=subject_claim, - picture_claim=picture_claim, + subject_template=subject_template, + picture_template=picture_template, localpart_template=localpart_template, display_name_template=display_name_template, email_template=email_template, @@ -1584,7 +1639,7 @@ class JinjaOidcMappingProvider(OidcMappingProvider[JinjaOidcMappingConfig]): ) def get_remote_user_id(self, userinfo: UserInfo) -> str: - return userinfo[self._config.subject_claim] + return self._config.subject_template.render(user=userinfo).strip() async def map_user_attributes( self, userinfo: UserInfo, token: Token, failures: int @@ -1615,7 +1670,7 @@ class JinjaOidcMappingProvider(OidcMappingProvider[JinjaOidcMappingConfig]): if email: emails.append(email) - picture = userinfo.get("picture") + picture = self._config.picture_template.render(user=userinfo).strip() return UserAttributeDict( localpart=localpart, diff --git a/synapse/handlers/search.py b/synapse/handlers/search.py
index 33115ce488..40f4635c4e 100644 --- a/synapse/handlers/search.py +++ b/synapse/handlers/search.py
@@ -275,7 +275,7 @@ class SearchHandler: ) room_ids = {r.room_id for r in rooms} - # If doing a subset of all rooms seearch, check if any of the rooms + # If doing a subset of all rooms search, check if any of the rooms # are from an upgraded room, and search their contents as well if search_filter.rooms: historical_room_ids: List[str] = [] diff --git a/synapse/handlers/sync.py b/synapse/handlers/sync.py
index 7d6a653747..6942e06c77 100644 --- a/synapse/handlers/sync.py +++ b/synapse/handlers/sync.py
@@ -37,6 +37,7 @@ from synapse.api.presence import UserPresenceState from synapse.api.room_versions import KNOWN_ROOM_VERSIONS from synapse.events import EventBase from synapse.handlers.relations import BundledAggregations +from synapse.logging import issue9533_logger from synapse.logging.context import current_context from synapse.logging.opentracing import ( SynapseTags, @@ -1402,11 +1403,14 @@ class SyncHandler: logger.debug("Fetching room data") - res = await self._generate_sync_entry_for_rooms( + ( + newly_joined_rooms, + newly_joined_or_invited_or_knocked_users, + newly_left_rooms, + newly_left_users, + ) = await self._generate_sync_entry_for_rooms( sync_result_builder, account_data_by_room ) - newly_joined_rooms, newly_joined_or_invited_or_knocked_users, _, _ = res - _, _, newly_left_rooms, newly_left_users = res block_all_presence_data = ( since_token is None and sync_config.filter_collection.blocks_all_presence() @@ -1623,13 +1627,18 @@ class SyncHandler: } ) - logger.debug( - "Returning %d to-device messages between %d and %d (current token: %d)", - len(messages), - since_stream_id, - stream_id, - now_token.to_device_key, - ) + if messages and issue9533_logger.isEnabledFor(logging.DEBUG): + issue9533_logger.debug( + "Returning to-device messages with stream_ids (%d, %d]; now: %d;" + " msgids: %s", + since_stream_id, + stream_id, + now_token.to_device_key, + [ + message["content"].get(EventContentFields.TO_DEVICE_MSGID) + for message in messages + ], + ) sync_result_builder.now_token = now_token.copy_and_replace( StreamKeyType.TO_DEVICE, stream_id ) @@ -1783,6 +1792,11 @@ class SyncHandler: - newly_left_rooms - newly_left_users """ + + # If the request doesn't care about rooms then nothing to do! + if sync_result_builder.sync_config.filter_collection.blocks_all_rooms(): + return set(), set(), set(), set() + since_token = sync_result_builder.since_token # 1. Start by fetching all ephemeral events in rooms we've joined (if required). diff --git a/synapse/module_api/__init__.py b/synapse/module_api/__init__.py
index 0092a03c59..6f4a934b05 100644 --- a/synapse/module_api/__init__.py +++ b/synapse/module_api/__init__.py
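Note: the hunk below re-exports Synapse's internal @cached descriptor as a stable module-API decorator. A hedged usage sketch, assuming a Synapse install; the module class and its lookup are invented for the example:

    from synapse.module_api import ModuleApi, cached

    class ExampleModule:
        def __init__(self, config: dict, api: ModuleApi):
            self._api = api

        @cached(max_entries=100)
        async def lookup_team(self, user_id: str) -> str:
            # Expensive work here is memoised per user_id and LRU-evicted
            # once max_entries distinct keys have been seen.
            ...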
@@ -18,6 +18,7 @@ from typing import ( TYPE_CHECKING, Any, Callable, + Collection, Dict, Generator, Iterable, @@ -126,7 +127,7 @@ from synapse.types import ( from synapse.types.state import StateFilter from synapse.util import Clock from synapse.util.async_helpers import maybe_awaitable -from synapse.util.caches.descriptors import CachedFunction, cached +from synapse.util.caches.descriptors import CachedFunction, cached as _cached from synapse.util.frozenutils import freeze if TYPE_CHECKING: @@ -136,6 +137,7 @@ if TYPE_CHECKING: T = TypeVar("T") P = ParamSpec("P") +F = TypeVar("F", bound=Callable[..., Any]) """ This package defines the 'stable' API which can be used by extension modules which @@ -185,6 +187,42 @@ class UserIpAndAgent: last_seen: int +def cached( + *, + max_entries: int = 1000, + num_args: Optional[int] = None, + uncached_args: Optional[Collection[str]] = None, +) -> Callable[[F], CachedFunction[F]]: + """Returns a decorator that applies a memoizing cache around the function. This + decorator behaves similarly to functools.lru_cache. + + Example: + + @cached() + def foo('a', 'b'): + ... + + Added in Synapse v1.74.0. + + Args: + max_entries: The maximum number of entries in the cache. If the cache is full + and a new entry is added, the least recently accessed entry will be evicted + from the cache. + num_args: The number of positional arguments (excluding `self`) to use as cache + keys. Defaults to all named args of the function. + uncached_args: A list of argument names to not use as the cache key. (`self` is + always ignored.) Cannot be used with num_args. + + Returns: + A decorator that applies a memoizing cache around the function. + """ + return _cached( + max_entries=max_entries, + num_args=num_args, + uncached_args=uncached_args, + ) + + class ModuleApi: """A proxy object that gets passed to various plugin modules so they can register new users etc if necessary. diff --git a/synapse/push/clientformat.py b/synapse/push/clientformat.py
index 622a1e35c5..bb76c169c6 100644 --- a/synapse/push/clientformat.py +++ b/synapse/push/clientformat.py
@@ -26,10 +26,7 @@ def format_push_rules_for_user( """Converts a list of rawrules and a enabled map into nested dictionaries to match the Matrix client-server format for push rules""" - rules: Dict[str, Dict[str, List[Dict[str, Any]]]] = { - "global": {}, - "device": {}, - } + rules: Dict[str, Dict[str, List[Dict[str, Any]]]] = {"global": {}} rules["global"] = _add_empty_priority_class_arrays(rules["global"]) diff --git a/synapse/replication/http/account_data.py b/synapse/replication/http/account_data.py
index 310f609153..0edc95977b 100644 --- a/synapse/replication/http/account_data.py +++ b/synapse/replication/http/account_data.py
@@ -28,7 +28,7 @@ if TYPE_CHECKING: logger = logging.getLogger(__name__) -class ReplicationUserAccountDataRestServlet(ReplicationEndpoint): +class ReplicationAddUserAccountDataRestServlet(ReplicationEndpoint): """Add user account data on the appropriate account data worker. Request format: @@ -49,7 +49,6 @@ class ReplicationUserAccountDataRestServlet(ReplicationEndpoint): super().__init__(hs) self.handler = hs.get_account_data_handler() - self.clock = hs.get_clock() @staticmethod async def _serialize_payload( # type: ignore[override] @@ -73,7 +72,45 @@ class ReplicationUserAccountDataRestServlet(ReplicationEndpoint): return 200, {"max_stream_id": max_stream_id} -class ReplicationRoomAccountDataRestServlet(ReplicationEndpoint): +class ReplicationRemoveUserAccountDataRestServlet(ReplicationEndpoint): + """Remove user account data on the appropriate account data worker. + + Request format: + + POST /_synapse/replication/remove_user_account_data/:user_id/:type + + { + "content": { ... }, + } + + """ + + NAME = "remove_user_account_data" + PATH_ARGS = ("user_id", "account_data_type") + CACHE = False + + def __init__(self, hs: "HomeServer"): + super().__init__(hs) + + self.handler = hs.get_account_data_handler() + + @staticmethod + async def _serialize_payload( # type: ignore[override] + user_id: str, account_data_type: str + ) -> JsonDict: + return {} + + async def _handle_request( # type: ignore[override] + self, request: Request, user_id: str, account_data_type: str + ) -> Tuple[int, JsonDict]: + max_stream_id = await self.handler.remove_account_data_for_user( + user_id, account_data_type + ) + + return 200, {"max_stream_id": max_stream_id} + + +class ReplicationAddRoomAccountDataRestServlet(ReplicationEndpoint): """Add room account data on the appropriate account data worker. Request format: @@ -94,7 +131,6 @@ class ReplicationRoomAccountDataRestServlet(ReplicationEndpoint): super().__init__(hs) self.handler = hs.get_account_data_handler() - self.clock = hs.get_clock() @staticmethod async def _serialize_payload( # type: ignore[override] @@ -118,6 +154,44 @@ class ReplicationRoomAccountDataRestServlet(ReplicationEndpoint): return 200, {"max_stream_id": max_stream_id} +class ReplicationRemoveRoomAccountDataRestServlet(ReplicationEndpoint): + """Remove room account data on the appropriate account data worker. + + Request format: + + POST /_synapse/replication/remove_room_account_data/:user_id/:room_id/:account_data_type + + { + "content": { ... }, + } + + """ + + NAME = "remove_room_account_data" + PATH_ARGS = ("user_id", "room_id", "account_data_type") + CACHE = False + + def __init__(self, hs: "HomeServer"): + super().__init__(hs) + + self.handler = hs.get_account_data_handler() + + @staticmethod + async def _serialize_payload( # type: ignore[override] + user_id: str, room_id: str, account_data_type: str, content: JsonDict + ) -> JsonDict: + return {} + + async def _handle_request( # type: ignore[override] + self, request: Request, user_id: str, room_id: str, account_data_type: str + ) -> Tuple[int, JsonDict]: + max_stream_id = await self.handler.remove_account_data_for_room( + user_id, room_id, account_data_type + ) + + return 200, {"max_stream_id": max_stream_id} + + class ReplicationAddTagRestServlet(ReplicationEndpoint): """Add tag on the appropriate account data worker. 
@@ -139,7 +213,6 @@ class ReplicationAddTagRestServlet(ReplicationEndpoint): super().__init__(hs) self.handler = hs.get_account_data_handler() - self.clock = hs.get_clock() @staticmethod async def _serialize_payload( # type: ignore[override] @@ -186,7 +259,6 @@ class ReplicationRemoveTagRestServlet(ReplicationEndpoint): super().__init__(hs) self.handler = hs.get_account_data_handler() - self.clock = hs.get_clock() @staticmethod async def _serialize_payload(user_id: str, room_id: str, tag: str) -> JsonDict: # type: ignore[override] @@ -206,7 +278,11 @@ class ReplicationRemoveTagRestServlet(ReplicationEndpoint): def register_servlets(hs: "HomeServer", http_server: HttpServer) -> None: - ReplicationUserAccountDataRestServlet(hs).register(http_server) - ReplicationRoomAccountDataRestServlet(hs).register(http_server) + ReplicationAddUserAccountDataRestServlet(hs).register(http_server) + ReplicationAddRoomAccountDataRestServlet(hs).register(http_server) ReplicationAddTagRestServlet(hs).register(http_server) ReplicationRemoveTagRestServlet(hs).register(http_server) + + if hs.config.experimental.msc3391_enabled: + ReplicationRemoveUserAccountDataRestServlet(hs).register(http_server) + ReplicationRemoveRoomAccountDataRestServlet(hs).register(http_server) diff --git a/synapse/replication/tcp/client.py b/synapse/replication/tcp/client.py
index 658d89210d..b5e40da533 100644 --- a/synapse/replication/tcp/client.py +++ b/synapse/replication/tcp/client.py
@@ -152,6 +152,9 @@ class ReplicationDataHandler: rows: a list of Stream.ROW_TYPE objects as returned by Stream.parse_row. """ self.store.process_replication_rows(stream_name, instance_name, token, rows) + # NOTE: this must be called after process_replication_rows to ensure any + # cache invalidations are first handled before any stream ID advances. + self.store.process_replication_position(stream_name, instance_name, token) if self.send_handler: await self.send_handler.process_replication_rows(stream_name, token, rows) diff --git a/synapse/rest/client/account.py b/synapse/rest/client/account.py
index c1781bc814..232f3a976d 100644 --- a/synapse/rest/client/account.py +++ b/synapse/rest/client/account.py
@@ -338,6 +338,11 @@ class EmailThreepidRequestTokenRestServlet(RestServlet): ) async def on_POST(self, request: SynapseRequest) -> Tuple[int, JsonDict]: + if not self.hs.config.registration.enable_3pid_changes: + raise SynapseError( + 400, "3PID changes are disabled on this server", Codes.FORBIDDEN + ) + if not self.config.email.can_verify_email: logger.warning( "Adding emails have been disabled due to lack of an email config" diff --git a/synapse/rest/client/account_data.py b/synapse/rest/client/account_data.py
index f13970b898..e805196fec 100644 --- a/synapse/rest/client/account_data.py +++ b/synapse/rest/client/account_data.py
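Note: the REST hunks below add the unstable MSC3391 DELETE endpoints (and make a PUT of {} behave like a delete) when msc3391_enabled is set. A hedged sketch of calling the user-level endpoint; the homeserver, user, data type, and token are all illustrative:

    import urllib.request

    req = urllib.request.Request(
        "https://synapse.example.org/_matrix/client/unstable"
        "/org.matrix.msc3391/user/@alice:example.org/account_data/m.fully_read",
        method="DELETE",
        headers={"Authorization": "Bearer <access_token>"},
    )
    with urllib.request.urlopen(req) as resp:
        print(resp.status)  # 200 with an empty JSON body on success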
@@ -41,6 +41,7 @@ class AccountDataServlet(RestServlet): def __init__(self, hs: "HomeServer"): super().__init__() + self._hs = hs self.auth = hs.get_auth() self.store = hs.get_datastores().main self.handler = hs.get_account_data_handler() @@ -54,6 +55,16 @@ class AccountDataServlet(RestServlet): body = parse_json_object_from_request(request) + # If experimental support for MSC3391 is enabled, then providing an empty dict + # as the value for an account data type should be functionally equivalent to + # calling the DELETE method on the same type. + if self._hs.config.experimental.msc3391_enabled: + if body == {}: + await self.handler.remove_account_data_for_user( + user_id, account_data_type + ) + return 200, {} + await self.handler.add_account_data_for_user(user_id, account_data_type, body) return 200, {} @@ -72,9 +83,48 @@ class AccountDataServlet(RestServlet): if event is None: raise NotFoundError("Account data not found") + # If experimental support for MSC3391 is enabled, then this endpoint should + # return a 404 if the content for an account data type is an empty dict. + if self._hs.config.experimental.msc3391_enabled and event == {}: + raise NotFoundError("Account data not found") + return 200, event +class UnstableAccountDataServlet(RestServlet): + """ + Contains an unstable endpoint for removing user account data, as specified by + MSC3391. If that MSC is accepted, this code should have unstable prefixes removed + and become incorporated into AccountDataServlet above. + """ + + PATTERNS = client_patterns( + "/org.matrix.msc3391/user/(?P<user_id>[^/]*)" + "/account_data/(?P<account_data_type>[^/]*)", + unstable=True, + releases=(), + ) + + def __init__(self, hs: "HomeServer"): + super().__init__() + self.auth = hs.get_auth() + self.handler = hs.get_account_data_handler() + + async def on_DELETE( + self, + request: SynapseRequest, + user_id: str, + account_data_type: str, + ) -> Tuple[int, JsonDict]: + requester = await self.auth.get_user_by_req(request) + if user_id != requester.user.to_string(): + raise AuthError(403, "Cannot delete account data for other users.") + + await self.handler.remove_account_data_for_user(user_id, account_data_type) + + return 200, {} + + class RoomAccountDataServlet(RestServlet): """ PUT /user/{user_id}/rooms/{room_id}/account_data/{account_dataType} HTTP/1.1 @@ -89,6 +139,7 @@ class RoomAccountDataServlet(RestServlet): def __init__(self, hs: "HomeServer"): super().__init__() + self._hs = hs self.auth = hs.get_auth() self.store = hs.get_datastores().main self.handler = hs.get_account_data_handler() @@ -121,6 +172,16 @@ class RoomAccountDataServlet(RestServlet): Codes.BAD_JSON, ) + # If experimental support for MSC3391 is enabled, then providing an empty dict + # as the value for an account data type should be functionally equivalent to + # calling the DELETE method on the same type. + if self._hs.config.experimental.msc3391_enabled: + if body == {}: + await self.handler.remove_account_data_for_room( + user_id, room_id, account_data_type + ) + return 200, {} + await self.handler.add_account_data_to_room( user_id, room_id, account_data_type, body ) @@ -152,9 +213,63 @@ class RoomAccountDataServlet(RestServlet): if event is None: raise NotFoundError("Room account data not found") + # If experimental support for MSC3391 is enabled, then this endpoint should + # return a 404 if the content for an account data type is an empty dict. 
+ if self._hs.config.experimental.msc3391_enabled and event == {}: + raise NotFoundError("Room account data not found") + return 200, event +class UnstableRoomAccountDataServlet(RestServlet): + """ + Contains an unstable endpoint for removing room account data, as specified by + MSC3391. If that MSC is accepted, this code should have unstable prefixes removed + and become incorporated into RoomAccountDataServlet above. + """ + + PATTERNS = client_patterns( + "/org.matrix.msc3391/user/(?P<user_id>[^/]*)" + "/rooms/(?P<room_id>[^/]*)" + "/account_data/(?P<account_data_type>[^/]*)", + unstable=True, + releases=(), + ) + + def __init__(self, hs: "HomeServer"): + super().__init__() + self.auth = hs.get_auth() + self.handler = hs.get_account_data_handler() + + async def on_DELETE( + self, + request: SynapseRequest, + user_id: str, + room_id: str, + account_data_type: str, + ) -> Tuple[int, JsonDict]: + requester = await self.auth.get_user_by_req(request) + if user_id != requester.user.to_string(): + raise AuthError(403, "Cannot delete account data for other users.") + + if not RoomID.is_valid(room_id): + raise SynapseError( + 400, + f"{room_id} is not a valid room ID", + Codes.INVALID_PARAM, + ) + + await self.handler.remove_account_data_for_room( + user_id, room_id, account_data_type + ) + + return 200, {} + + def register_servlets(hs: "HomeServer", http_server: HttpServer) -> None: AccountDataServlet(hs).register(http_server) RoomAccountDataServlet(hs).register(http_server) + + if hs.config.experimental.msc3391_enabled: + UnstableAccountDataServlet(hs).register(http_server) + UnstableRoomAccountDataServlet(hs).register(http_server) diff --git a/synapse/rest/media/v1/oembed.py b/synapse/rest/media/v1/oembed.py
index 827afd868d..a3738a6250 100644 --- a/synapse/rest/media/v1/oembed.py +++ b/synapse/rest/media/v1/oembed.py
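Note: the media hunk below unescapes HTML entities in oEmbed titles (and renames the local html variable so it no longer shadows the newly imported module). The fix in isolation:

    import html

    # As returned by a provider that needlessly HTML-escaped a JSON field:
    title = "Matrix &amp; Synapse"
    print(html.unescape(title))  # "Matrix & Synapse"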
@@ -11,6 +11,7 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. +import html import logging import urllib.parse from typing import TYPE_CHECKING, List, Optional @@ -161,7 +162,9 @@ class OEmbedProvider: title = oembed.get("title") if title and isinstance(title, str): - open_graph_response["og:title"] = title + # A common WordPress plug-in seems to incorrectly escape entities + # in the oEmbed response. + open_graph_response["og:title"] = html.unescape(title) author_name = oembed.get("author_name") if not isinstance(author_name, str): @@ -180,9 +183,9 @@ class OEmbedProvider: # Process each type separately. oembed_type = oembed.get("type") if oembed_type == "rich": - html = oembed.get("html") - if isinstance(html, str): - calc_description_and_urls(open_graph_response, html) + html_str = oembed.get("html") + if isinstance(html_str, str): + calc_description_and_urls(open_graph_response, html_str) elif oembed_type == "photo": # If this is a photo, use the full image, not the thumbnail. @@ -192,8 +195,8 @@ class OEmbedProvider: elif oembed_type == "video": open_graph_response["og:type"] = "video.other" - html = oembed.get("html") - if html and isinstance(html, str): + html_str = oembed.get("html") + if html_str and isinstance(html_str, str): calc_description_and_urls(open_graph_response, oembed["html"]) for size in ("width", "height"): val = oembed.get(size) diff --git a/synapse/storage/_base.py b/synapse/storage/_base.py
index 69abf6fa87..41d9111019 100644 --- a/synapse/storage/_base.py +++ b/synapse/storage/_base.py
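Note: the hunk below splits replication handling into two phases, with the ordering contract spelled out in the docstrings. An illustrative store showing why the order matters (invalidate first, only then advance, so no reader can observe a new token alongside stale cache entries):

    class ExampleStore:
        def __init__(self) -> None:
            self._cache: dict = {}
            self._current_token = 0

        def process_replication_rows(self, stream_name, instance_name, token, rows) -> None:
            for row in rows:
                self._cache.pop(row, None)  # phase 1: drop stale entries

        def process_replication_position(self, stream_name, instance_name, token) -> None:
            self._current_token = token     # phase 2: expose the new position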
@@ -57,7 +57,22 @@ class SQLBaseStore(metaclass=ABCMeta): token: int, rows: Iterable[Any], ) -> None: - pass + """ + Used by storage classes to invalidate caches based on incoming replication data. These + must not update any ID generators; use `process_replication_position` for that. + """ + + def process_replication_position( # noqa: B027 (no-op by design) + self, + stream_name: str, + instance_name: str, + token: int, + ) -> None: + """ + Used by storage classes to advance ID generators based on incoming replication data. This + is called after process_replication_rows such that caches are invalidated before any token + positions advance. + """ def _invalidate_state_caches( self, room_id: str, members_changed: Collection[str] diff --git a/synapse/storage/database.py b/synapse/storage/database.py
index 0b29e67b94..88479a16db 100644 --- a/synapse/storage/database.py +++ b/synapse/storage/database.py
@@ -1762,7 +1762,8 @@ class DatabasePool: desc: description of the transaction, for logging and metrics Returns: - A list of dictionaries. + A list of dictionaries, one per result row, each a mapping between the + column names from `retcols` and that column's value for the row. """ return await self.runInteraction( desc, @@ -1791,6 +1792,10 @@ class DatabasePool: column names and values to select the rows with, or None to not apply a WHERE clause. retcols: the names of the columns to return + + Returns: + A list of dictionaries, one per result row, each a mapping between the + column names from `retcols` and that column's value for the row. """ if keyvalues: sql = "SELECT %s FROM %s WHERE %s" % ( @@ -1898,6 +1903,19 @@ class DatabasePool: updatevalues: Dict[str, Any], desc: str, ) -> int: + """ + Update rows in the given database table. + If the given keyvalues don't match anything, nothing will be updated. + + Args: + table: The database table to update. + keyvalues: A mapping of column name to value to match rows on. + updatevalues: A mapping of column name to value to replace in any matched rows. + desc: description of the transaction, for logging and metrics. + + Returns: + The number of rows that were updated. Will be 0 if no matching rows were found. + """ return await self.runInteraction( desc, self.simple_update_txn, table, keyvalues, updatevalues ) @@ -1909,6 +1927,19 @@ class DatabasePool: keyvalues: Dict[str, Any], updatevalues: Dict[str, Any], ) -> int: + """ + Update rows in the given database table. + If the given keyvalues don't match anything, nothing will be updated. + + Args: + txn: The database transaction object. + table: The database table to update. + keyvalues: A mapping of column name to value to match rows on. + updatevalues: A mapping of column name to value to replace in any matched rows. + + Returns: + The number of rows that were updated. Will be 0 if no matching rows were found. + """ if keyvalues: where = "WHERE %s" % " AND ".join("%s = ?" % k for k in keyvalues.keys()) else: diff --git a/synapse/storage/databases/main/account_data.py b/synapse/storage/databases/main/account_data.py
index 07908c41d9..86032897f5 100644 --- a/synapse/storage/databases/main/account_data.py +++ b/synapse/storage/databases/main/account_data.py
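Note: the storage hunks below implement deletion as an UPDATE that sets content to '{}' and skips rows that are already empty, using rowcount to report whether a delete actually happened. A runnable sketch of that SQL pattern with the stdlib driver (the table shape and values are illustrative):

    import sqlite3

    conn = sqlite3.connect(":memory:")
    conn.execute(
        "CREATE TABLE account_data "
        "(user_id TEXT, account_data_type TEXT, stream_id INT, content TEXT)"
    )
    conn.execute(
        "INSERT INTO account_data VALUES (?, ?, ?, ?)",
        ("@alice:example.org", "m.fully_read", 1, '{"ts": 1}'),
    )

    delete_sql = (
        "UPDATE account_data SET stream_id = ?, content = '{}' "
        "WHERE user_id = ? AND account_data_type = ? AND content != '{}'"
    )
    cur = conn.execute(delete_sql, (2, "@alice:example.org", "m.fully_read"))
    print(cur.rowcount)  # 1: a row was emptied, i.e. "deleted"

    cur = conn.execute(delete_sql, (3, "@alice:example.org", "m.fully_read"))
    print(cur.rowcount)  # 0: already empty, so the caller reports a no-op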
@@ -123,7 +123,11 @@ class AccountDataWorkerStore(PushRulesWorkerStore, CacheInvalidationWorkerStore)
     async def get_account_data_for_user(
         self, user_id: str
     ) -> Tuple[Dict[str, JsonDict], Dict[str, Dict[str, JsonDict]]]:
-        """Get all the client account_data for a user.
+        """
+        Get all the client account_data for a user.
+
+        If experimental MSC3391 support is enabled, any entries with an empty
+        content body are excluded, as this means they have been deleted.

         Args:
             user_id: The user to get the account_data for.
@@ -135,27 +139,48 @@ class AccountDataWorkerStore(PushRulesWorkerStore, CacheInvalidationWorkerStore)
         def get_account_data_for_user_txn(
             txn: LoggingTransaction,
         ) -> Tuple[Dict[str, JsonDict], Dict[str, Dict[str, JsonDict]]]:
-            rows = self.db_pool.simple_select_list_txn(
-                txn,
-                "account_data",
-                {"user_id": user_id},
-                ["account_data_type", "content"],
-            )
+            # The `content != '{}'` condition below prevents us from using
+            # `simple_select_list_txn` here, as it doesn't support conditions
+            # other than 'equals'.
+            sql = """
+                SELECT account_data_type, content FROM account_data
+                WHERE user_id = ?
+            """
+
+            # If experimental MSC3391 support is enabled, then account data entries
+            # with an empty content are considered "deleted". So skip adding them to
+            # the results.
+            if self.hs.config.experimental.msc3391_enabled:
+                sql += " AND content != '{}'"
+
+            txn.execute(sql, (user_id,))
+            rows = self.db_pool.cursor_to_dict(txn)

             global_account_data = {
                 row["account_data_type"]: db_to_json(row["content"]) for row in rows
             }

-            rows = self.db_pool.simple_select_list_txn(
-                txn,
-                "room_account_data",
-                {"user_id": user_id},
-                ["room_id", "account_data_type", "content"],
-            )
+            # The `content != '{}'` condition below prevents us from using
+            # `simple_select_list_txn` here, as it doesn't support conditions
+            # other than 'equals'.
+            sql = """
+                SELECT room_id, account_data_type, content FROM room_account_data
+                WHERE user_id = ?
+            """
+
+            # If experimental MSC3391 support is enabled, then account data entries
+            # with an empty content are considered "deleted". So skip adding them to
+            # the results.
+            if self.hs.config.experimental.msc3391_enabled:
+                sql += " AND content != '{}'"
+
+            txn.execute(sql, (user_id,))
+            rows = self.db_pool.cursor_to_dict(txn)

             by_room: Dict[str, Dict[str, JsonDict]] = {}
             for row in rows:
                 room_data = by_room.setdefault(row["room_id"], {})
+
                 room_data[row["account_data_type"]] = db_to_json(row["content"])

             return global_account_data, by_room
@@ -411,10 +436,7 @@ class AccountDataWorkerStore(PushRulesWorkerStore, CacheInvalidationWorkerStore)
         token: int,
         rows: Iterable[Any],
     ) -> None:
-        if stream_name == TagAccountDataStream.NAME:
-            self._account_data_id_gen.advance(instance_name, token)
-        elif stream_name == AccountDataStream.NAME:
-            self._account_data_id_gen.advance(instance_name, token)
+        if stream_name == AccountDataStream.NAME:
             for row in rows:
                 if not row.room_id:
                     self.get_global_account_data_by_type_for_user.invalidate(
@@ -429,6 +451,15 @@ class AccountDataWorkerStore(PushRulesWorkerStore, CacheInvalidationWorkerStore)

         super().process_replication_rows(stream_name, instance_name, token, rows)

+    def process_replication_position(
+        self, stream_name: str, instance_name: str, token: int
+    ) -> None:
+        if stream_name == TagAccountDataStream.NAME:
+            self._account_data_id_gen.advance(instance_name, token)
+        elif stream_name == AccountDataStream.NAME:
+            self._account_data_id_gen.advance(instance_name, token)
+        super().process_replication_position(stream_name, instance_name, token)
+
     async def add_account_data_to_room(
         self, user_id: str, room_id: str, account_data_type: str, content: JsonDict
     ) -> int:
@@ -469,6 +500,72 @@ class AccountDataWorkerStore(PushRulesWorkerStore, CacheInvalidationWorkerStore)

         return self._account_data_id_gen.get_current_token()

+    async def remove_account_data_for_room(
+        self, user_id: str, room_id: str, account_data_type: str
+    ) -> Optional[int]:
+        """Delete the room account data for the user of a given type.
+
+        Args:
+            user_id: The user to remove account_data for.
+            room_id: The room ID to scope the request to.
+            account_data_type: The account data type to delete.
+
+        Returns:
+            The maximum stream position, or None if there was no matching room account
+            data to delete.
+        """
+        assert self._can_write_to_account_data
+        assert isinstance(self._account_data_id_gen, AbstractStreamIdGenerator)
+
+        def _remove_account_data_for_room_txn(
+            txn: LoggingTransaction, next_id: int
+        ) -> bool:
+            """
+            Args:
+                txn: The transaction object.
+                next_id: The stream_id to update any existing rows to.
+
+            Returns:
+                True if an entry in room_account_data had its content set to '{}',
+                otherwise False. This informs callers of whether there actually was an
+                existing room account data entry to delete, or if the call was a no-op.
+            """
+            # We can't use `simple_update` as it doesn't have the ability to specify
+            # where clauses other than '=', which we need for `content != '{}'` below.
+            sql = """
+                UPDATE room_account_data
+                SET stream_id = ?, content = '{}'
+                WHERE user_id = ?
+                    AND room_id = ?
+                    AND account_data_type = ?
+                    AND content != '{}'
+            """
+            txn.execute(
+                sql,
+                (next_id, user_id, room_id, account_data_type),
+            )
+            # Return true if any rows were updated.
+            return txn.rowcount != 0
+
+        async with self._account_data_id_gen.get_next() as next_id:
+            row_updated = await self.db_pool.runInteraction(
+                "remove_account_data_for_room",
+                _remove_account_data_for_room_txn,
+                next_id,
+            )
+
+            if not row_updated:
+                return None
+
+            self._account_data_stream_cache.entity_has_changed(user_id, next_id)
+            self.get_account_data_for_user.invalidate((user_id,))
+            self.get_account_data_for_room.invalidate((user_id, room_id))
+            self.get_account_data_for_room_and_type.prefill(
+                (user_id, room_id, account_data_type), {}
+            )
+
+        return self._account_data_id_gen.get_current_token()
+
     async def add_account_data_for_user(
         self, user_id: str, account_data_type: str, content: JsonDict
     ) -> int:
@@ -569,6 +666,108 @@ class AccountDataWorkerStore(PushRulesWorkerStore, CacheInvalidationWorkerStore)
         self._invalidate_cache_and_stream(txn, self.ignored_by, (ignored_user_id,))
         self._invalidate_cache_and_stream(txn, self.ignored_users, (user_id,))

+    async def remove_account_data_for_user(
+        self,
+        user_id: str,
+        account_data_type: str,
+    ) -> Optional[int]:
+        """
+        Delete a single piece of user account data by type.
+
+        A "delete" is performed by updating a potentially existing row in the
+        "account_data" database table for (user_id, account_data_type) and
+        setting its content to "{}".
+
+        Args:
+            user_id: The user ID to modify the account data of.
+            account_data_type: The type to remove.
+
+        Returns:
+            The maximum stream position, or None if there was no matching account data
+            to delete.
+        """
+        assert self._can_write_to_account_data
+        assert isinstance(self._account_data_id_gen, AbstractStreamIdGenerator)
+
+        def _remove_account_data_for_user_txn(
+            txn: LoggingTransaction, next_id: int
+        ) -> bool:
+            """
+            Args:
+                txn: The transaction object.
+                next_id: The stream_id to update any existing rows to.
+
+            Returns:
+                True if an entry in account_data had its content set to '{}', otherwise
+                False. This informs callers of whether there actually was an existing
+                account data entry to delete, or if the call was a no-op.
+            """
+            # We can't use `simple_update` as it doesn't have the ability to specify
+            # where clauses other than '=', which we need for `content != '{}'` below.
+            sql = """
+                UPDATE account_data
+                SET stream_id = ?, content = '{}'
+                WHERE user_id = ?
+                    AND account_data_type = ?
+                    AND content != '{}'
+            """
+            txn.execute(sql, (next_id, user_id, account_data_type))
+            if txn.rowcount == 0:
+                # We didn't update any rows. This means that there was no matching
+                # account data entry to delete in the first place.
+                return False
+
+            # Ignored users get denormalized into a separate table as an optimisation.
+            if account_data_type == AccountDataTypes.IGNORED_USER_LIST:
+                # If this method was called with the ignored users account data type, we
+                # simply delete all ignored users.
+
+                # First pull all the users that this user ignores.
+                previously_ignored_users = set(
+                    self.db_pool.simple_select_onecol_txn(
+                        txn,
+                        table="ignored_users",
+                        keyvalues={"ignorer_user_id": user_id},
+                        retcol="ignored_user_id",
+                    )
+                )
+
+                # Then delete them from the database.
+                self.db_pool.simple_delete_txn(
+                    txn,
+                    table="ignored_users",
+                    keyvalues={"ignorer_user_id": user_id},
+                )
+
+                # Invalidate the cache for ignored users which were removed.
+                for ignored_user_id in previously_ignored_users:
+                    self._invalidate_cache_and_stream(
+                        txn, self.ignored_by, (ignored_user_id,)
+                    )
+
+                # Invalidate this user's cache of ignored users.
+                self._invalidate_cache_and_stream(txn, self.ignored_users, (user_id,))
+
+            return True
+
+        async with self._account_data_id_gen.get_next() as next_id:
+            row_updated = await self.db_pool.runInteraction(
+                "remove_account_data_for_user",
+                _remove_account_data_for_user_txn,
+                next_id,
+            )
+
+            if not row_updated:
+                return None
+
+            self._account_data_stream_cache.entity_has_changed(user_id, next_id)
+            self.get_account_data_for_user.invalidate((user_id,))
+            self.get_global_account_data_by_type_for_user.prefill(
+                (user_id, account_data_type), {}
+            )
+
+        return self._account_data_id_gen.get_current_token()
+
     async def purge_account_data_for_user(self, user_id: str) -> None:
         """
         Removes ALL the account data for a user.
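For context: the patch above "deletes" account data by overwriting `content` with `'{}'` and uses the cursor's rowcount to tell a real delete from a no-op. A self-contained sketch of that tombstone pattern, using plain sqlite3 rather than Synapse's DatabasePool:

```python
import sqlite3

conn = sqlite3.connect(":memory:")
conn.execute("CREATE TABLE account_data (user_id TEXT, type TEXT, content TEXT)")
conn.execute(
    "INSERT INTO account_data VALUES (?, ?, ?)",
    ("@u:hs", "m.example", '{"enabled": true}'),
)

DELETE_SQL = (
    "UPDATE account_data SET content = '{}' "
    "WHERE user_id = ? AND type = ? AND content != '{}'"
)

# First call: a live entry exists, so one row is tombstoned.
cur = conn.execute(DELETE_SQL, ("@u:hs", "m.example"))
print(cur.rowcount != 0)  # True

# Second call: the entry is already '{}', so the call is a no-op.
cur = conn.execute(DELETE_SQL, ("@u:hs", "m.example"))
print(cur.rowcount != 0)  # False
```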
diff --git a/synapse/storage/databases/main/cache.py b/synapse/storage/databases/main/cache.py
index a58668a380..2179a8bf59 100644
--- a/synapse/storage/databases/main/cache.py
+++ b/synapse/storage/databases/main/cache.py
@@ -164,9 +164,6 @@ class CacheInvalidationWorkerStore(SQLBaseStore):
                 backfilled=True,
             )
         elif stream_name == CachesStream.NAME:
-            if self._cache_id_gen:
-                self._cache_id_gen.advance(instance_name, token)
-
             for row in rows:
                 if row.cache_func == CURRENT_STATE_CACHE_NAME:
                     if row.keys is None:
@@ -182,6 +179,14 @@ class CacheInvalidationWorkerStore(SQLBaseStore):

         super().process_replication_rows(stream_name, instance_name, token, rows)

+    def process_replication_position(
+        self, stream_name: str, instance_name: str, token: int
+    ) -> None:
+        if stream_name == CachesStream.NAME:
+            if self._cache_id_gen:
+                self._cache_id_gen.advance(instance_name, token)
+        super().process_replication_position(stream_name, instance_name, token)
+
     def _process_event_stream_row(self, token: int, row: EventsStreamRow) -> None:
         data = row.data

diff --git a/synapse/storage/databases/main/deviceinbox.py b/synapse/storage/databases/main/deviceinbox.py
index 50899b2949..2440ac03f7 100644
--- a/synapse/storage/databases/main/deviceinbox.py
+++ b/synapse/storage/databases/main/deviceinbox.py
@@ -157,6 +157,13 @@ class DeviceInboxWorkerStore(SQLBaseStore):
             )
         return super().process_replication_rows(stream_name, instance_name, token, rows)

+    def process_replication_position(
+        self, stream_name: str, instance_name: str, token: int
+    ) -> None:
+        if stream_name == ToDeviceStream.NAME:
+            self._device_inbox_id_gen.advance(instance_name, token)
+        super().process_replication_position(stream_name, instance_name, token)
+
     def get_to_device_stream_token(self) -> int:
         return self._device_inbox_id_gen.get_current_token()

diff --git a/synapse/storage/databases/main/devices.py b/synapse/storage/databases/main/devices.py
index a921332cb0..b067664473 100644
--- a/synapse/storage/databases/main/devices.py
+++ b/synapse/storage/databases/main/devices.py
@@ -162,14 +162,21 @@ class DeviceWorkerStore(RoomMemberWorkerStore, EndToEndKeyWorkerStore):
         self, stream_name: str, instance_name: str, token: int, rows: Iterable[Any]
     ) -> None:
         if stream_name == DeviceListsStream.NAME:
-            self._device_list_id_gen.advance(instance_name, token)
             self._invalidate_caches_for_devices(token, rows)
         elif stream_name == UserSignatureStream.NAME:
-            self._device_list_id_gen.advance(instance_name, token)
             for row in rows:
                 self._user_signature_stream_cache.entity_has_changed(row.user_id, token)
         return super().process_replication_rows(stream_name, instance_name, token, rows)

+    def process_replication_position(
+        self, stream_name: str, instance_name: str, token: int
+    ) -> None:
+        if stream_name == DeviceListsStream.NAME:
+            self._device_list_id_gen.advance(instance_name, token)
+        elif stream_name == UserSignatureStream.NAME:
+            self._device_list_id_gen.advance(instance_name, token)
+        super().process_replication_position(stream_name, instance_name, token)
+
     def _invalidate_caches_for_devices(
         self, token: int, rows: Iterable[DeviceListsStream.DeviceListsStreamRow]
     ) -> None:
diff --git a/synapse/storage/databases/main/events_worker.py b/synapse/storage/databases/main/events_worker.py
index f80b494edb..90aa4e01bf 100644
--- a/synapse/storage/databases/main/events_worker.py
+++ b/synapse/storage/databases/main/events_worker.py
@@ -388,11 +388,7 @@ class EventsWorkerStore(SQLBaseStore):
         token: int,
         rows: Iterable[Any],
     ) -> None:
-        if stream_name == EventsStream.NAME:
-            self._stream_id_gen.advance(instance_name, token)
-        elif stream_name == BackfillStream.NAME:
-            self._backfill_id_gen.advance(instance_name, -token)
-        elif stream_name == UnPartialStatedEventStream.NAME:
+        if stream_name == UnPartialStatedEventStream.NAME:
             for row in rows:
                 assert isinstance(row, UnPartialStatedEventStreamRow)

@@ -405,6 +401,15 @@ class EventsWorkerStore(SQLBaseStore):

         super().process_replication_rows(stream_name, instance_name, token, rows)

+    def process_replication_position(
+        self, stream_name: str, instance_name: str, token: int
+    ) -> None:
+        if stream_name == EventsStream.NAME:
+            self._stream_id_gen.advance(instance_name, token)
+        elif stream_name == BackfillStream.NAME:
+            self._backfill_id_gen.advance(instance_name, -token)
+        super().process_replication_position(stream_name, instance_name, token)
+
     async def have_censored_event(self, event_id: str) -> bool:
         """Check if an event has been censored, i.e. if the content of the event has been
         erased from the database due to a redaction.
diff --git a/synapse/storage/databases/main/presence.py b/synapse/storage/databases/main/presence.py
index 9769a18a9d..7b60815043 100644
--- a/synapse/storage/databases/main/presence.py
+++ b/synapse/storage/databases/main/presence.py
@@ -439,8 +439,14 @@ class PresenceStore(PresenceBackgroundUpdateStore, CacheInvalidationWorkerStore)
         rows: Iterable[Any],
     ) -> None:
         if stream_name == PresenceStream.NAME:
-            self._presence_id_gen.advance(instance_name, token)
             for row in rows:
                 self.presence_stream_cache.entity_has_changed(row.user_id, token)
                 self._get_presence_for_user.invalidate((row.user_id,))
         return super().process_replication_rows(stream_name, instance_name, token, rows)
+
+    def process_replication_position(
+        self, stream_name: str, instance_name: str, token: int
+    ) -> None:
+        if stream_name == PresenceStream.NAME:
+            self._presence_id_gen.advance(instance_name, token)
+        super().process_replication_position(stream_name, instance_name, token)
diff --git a/synapse/storage/databases/main/push_rule.py b/synapse/storage/databases/main/push_rule.py
index d4c64c46ad..d4e4b777da 100644
--- a/synapse/storage/databases/main/push_rule.py
+++ b/synapse/storage/databases/main/push_rule.py
@@ -154,6 +154,13 @@ class PushRulesWorkerStore(
             self.push_rules_stream_cache.entity_has_changed(row.user_id, token)
         return super().process_replication_rows(stream_name, instance_name, token, rows)

+    def process_replication_position(
+        self, stream_name: str, instance_name: str, token: int
+    ) -> None:
+        if stream_name == PushRulesStream.NAME:
+            self._push_rules_stream_id_gen.advance(instance_name, token)
+        super().process_replication_position(stream_name, instance_name, token)
+
     @cached(max_entries=5000)
     async def get_push_rules_for_user(self, user_id: str) -> FilteredPushRules:
         rows = await self.db_pool.simple_select_list(
diff --git a/synapse/storage/databases/main/pusher.py b/synapse/storage/databases/main/pusher.py
index 40fd781a6a..7f24a3b6ec 100644
--- a/synapse/storage/databases/main/pusher.py
+++ b/synapse/storage/databases/main/pusher.py
@@ -111,12 +111,12 @@ class PusherWorkerStore(SQLBaseStore):
     def get_pushers_stream_token(self) -> int:
         return self._pushers_id_gen.get_current_token()

-    def process_replication_rows(
-        self, stream_name: str, instance_name: str, token: int, rows: Iterable[Any]
+    def process_replication_position(
+        self, stream_name: str, instance_name: str, token: int
     ) -> None:
         if stream_name == PushersStream.NAME:
             self._pushers_id_gen.advance(instance_name, token)
-        return super().process_replication_rows(stream_name, instance_name, token, rows)
+        super().process_replication_position(stream_name, instance_name, token)

     async def get_pushers_by_app_id_and_pushkey(
         self, app_id: str, pushkey: str
diff --git a/synapse/storage/databases/main/receipts.py b/synapse/storage/databases/main/receipts.py
index e06725f69c..86f5bce5f0 100644
--- a/synapse/storage/databases/main/receipts.py
+++ b/synapse/storage/databases/main/receipts.py
@@ -588,6 +588,13 @@ class ReceiptsWorkerStore(SQLBaseStore):

         return super().process_replication_rows(stream_name, instance_name, token, rows)

+    def process_replication_position(
+        self, stream_name: str, instance_name: str, token: int
+    ) -> None:
+        if stream_name == ReceiptsStream.NAME:
+            self._receipts_id_gen.advance(instance_name, token)
+        super().process_replication_position(stream_name, instance_name, token)
+
     def _insert_linearized_receipt_txn(
         self,
         txn: LoggingTransaction,
diff --git a/synapse/storage/databases/main/tags.py b/synapse/storage/databases/main/tags.py
index b0f5de67a3..e23c927e02 100644
--- a/synapse/storage/databases/main/tags.py
+++ b/synapse/storage/databases/main/tags.py
@@ -300,13 +300,19 @@ class TagsWorkerStore(AccountDataWorkerStore):
         rows: Iterable[Any],
     ) -> None:
         if stream_name == TagAccountDataStream.NAME:
-            self._account_data_id_gen.advance(instance_name, token)
             for row in rows:
                 self.get_tags_for_user.invalidate((row.user_id,))
                 self._account_data_stream_cache.entity_has_changed(row.user_id, token)

         super().process_replication_rows(stream_name, instance_name, token, rows)

+    def process_replication_position(
+        self, stream_name: str, instance_name: str, token: int
+    ) -> None:
+        if stream_name == TagAccountDataStream.NAME:
+            self._account_data_id_gen.advance(instance_name, token)
+        super().process_replication_position(stream_name, instance_name, token)
+

 class TagsStore(TagsWorkerStore):
     pass
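For context: the change repeated across the storage classes above moves stream-token advancement out of `process_replication_rows` into the new `process_replication_position` hook, so a POSITION update with no accompanying rows still moves the token forward. A schematic sketch of the resulting split (class and stream names invented for illustration):

```python
from typing import Any, Dict, Iterable


class ExampleWorkerStore:
    """Schematic sketch of the split applied across the stores above."""

    def __init__(self) -> None:
        # Stand-in for a stream ID generator: latest token per writer.
        self._positions: Dict[str, int] = {}

    def process_replication_rows(
        self, stream_name: str, instance_name: str, token: int, rows: Iterable[Any]
    ) -> None:
        # Only per-row side effects (cache invalidation etc.) live here now.
        if stream_name == "example_stream":
            for row in rows:
                print(f"invalidate caches for {row}")

    def process_replication_position(
        self, stream_name: str, instance_name: str, token: int
    ) -> None:
        # Advancing the stream position happens here, independently of rows.
        if stream_name == "example_stream":
            current = self._positions.get(instance_name, 0)
            self._positions[instance_name] = max(current, token)
```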
diff --git a/synapse/util/macaroons.py b/synapse/util/macaroons.py
index 5df03d3ddc..644c341e8c 100644
--- a/synapse/util/macaroons.py
+++ b/synapse/util/macaroons.py
@@ -110,6 +110,9 @@ class OidcSessionData:
     ui_auth_session_id: str
     """The session ID of the ongoing UI Auth ("" if this is a login)"""

+    code_verifier: str
+    """The random string used in the RFC7636 code challenge ("" if PKCE is not being used)."""
+

 class MacaroonGenerator:
     def __init__(self, clock: Clock, location: str, secret_key: bytes):
@@ -187,6 +190,7 @@ class MacaroonGenerator:
         macaroon.add_first_party_caveat(
             f"ui_auth_session_id = {session_data.ui_auth_session_id}"
         )
+        macaroon.add_first_party_caveat(f"code_verifier = {session_data.code_verifier}")
         macaroon.add_first_party_caveat(f"time < {expiry}")

         return macaroon.serialize()
@@ -278,6 +282,7 @@ class MacaroonGenerator:
         v.satisfy_general(lambda c: c.startswith("idp_id = "))
         v.satisfy_general(lambda c: c.startswith("client_redirect_url = "))
         v.satisfy_general(lambda c: c.startswith("ui_auth_session_id = "))
+        v.satisfy_general(lambda c: c.startswith("code_verifier = "))
         satisfy_expiry(v, self._clock.time_msec)
         v.verify(macaroon, self._secret_key)

@@ -287,11 +292,13 @@ class MacaroonGenerator:
         idp_id = get_value_from_macaroon(macaroon, "idp_id")
         client_redirect_url = get_value_from_macaroon(macaroon, "client_redirect_url")
         ui_auth_session_id = get_value_from_macaroon(macaroon, "ui_auth_session_id")
+        code_verifier = get_value_from_macaroon(macaroon, "code_verifier")
         return OidcSessionData(
             nonce=nonce,
             idp_id=idp_id,
             client_redirect_url=client_redirect_url,
             ui_auth_session_id=ui_auth_session_id,
+            code_verifier=code_verifier,
         )

     def _generate_base_macaroon(self, type: MacaroonType) -> pymacaroons.Macaroon:
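For context: the new `code_verifier` caveat carries the RFC 7636 (PKCE) verifier across the OIDC redirect. The standard S256 construction, which the tests further down exercise, can be sketched as follows (this is the generic RFC 7636 recipe, not Synapse's exact code):

```python
import base64
import hashlib
import secrets
from typing import Tuple


def make_pkce_pair() -> Tuple[str, str]:
    # RFC 7636: the verifier is a high-entropy URL-safe string; the S256
    # challenge is the unpadded base64url encoding of its SHA-256 hash.
    code_verifier = secrets.token_urlsafe(32)
    digest = hashlib.sha256(code_verifier.encode("ascii")).digest()
    code_challenge = base64.urlsafe_b64encode(digest).rstrip(b"=").decode("ascii")
    return code_verifier, code_challenge


verifier, challenge = make_pkce_pair()
# The challenge is sent in the authorization redirect; the verifier is kept
# (here, inside the oidc_session macaroon) and presented at token exchange.
```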
diff --git a/tests/crypto/test_event_signing.py b/tests/crypto/test_event_signing.py
index 8fa710c9dc..2b0972eee8 100644
--- a/tests/crypto/test_event_signing.py
+++ b/tests/crypto/test_event_signing.py
@@ -33,12 +33,12 @@ HOSTNAME = "domain"

 class EventSigningTestCase(unittest.TestCase):
-    def setUp(self):
+    def setUp(self) -> None:
         self.signing_key: SigningKey = decode_signing_key_base64(
             KEY_ALG, KEY_VER, SIGNING_KEY_SEED
         )

-    def test_sign_minimal(self):
+    def test_sign_minimal(self) -> None:
         event_dict = {
             "event_id": "$0:domain",
             "origin": "domain",
@@ -69,7 +69,7 @@ class EventSigningTestCase(unittest.TestCase):
             "aIbygsSdLOFzvdDjww8zUVKCmI02eP9xtyJxc/cLiBA",
         )

-    def test_sign_message(self):
+    def test_sign_message(self) -> None:
         event_dict = {
             "content": {"body": "Here is the message content"},
             "event_id": "$0:domain",
diff --git a/tests/crypto/test_keyring.py b/tests/crypto/test_keyring.py
index f7c309cad0..0e8af2da54 100644
--- a/tests/crypto/test_keyring.py
+++ b/tests/crypto/test_keyring.py
@@ -12,7 +12,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 import time
-from typing import Dict, List
+from typing import Any, Dict, List, Optional, cast
 from unittest.mock import Mock

 import attr
@@ -20,10 +20,11 @@ import canonicaljson
 import signedjson.key
 import signedjson.sign
 from signedjson.key import encode_verify_key_base64, get_verify_key
-from signedjson.types import SigningKey
+from signedjson.types import SigningKey, VerifyKey

 from twisted.internet import defer
 from twisted.internet.defer import Deferred, ensureDeferred
+from twisted.test.proto_helpers import MemoryReactor

 from synapse.api.errors import SynapseError
 from synapse.crypto import keyring
@@ -33,11 +34,15 @@ from synapse.crypto.keyring import (
     StoreKeyFetcher,
 )
 from synapse.logging.context import (
+    ContextRequest,
     LoggingContext,
     current_context,
     make_deferred_yieldable,
 )
+from synapse.server import HomeServer
 from synapse.storage.keys import FetchKeyResult
+from synapse.types import JsonDict
+from synapse.util import Clock

 from tests import unittest
 from tests.test_utils import make_awaitable
@@ -45,15 +50,15 @@ from tests.unittest import logcontext_clean, override_config

 class MockPerspectiveServer:
-    def __init__(self):
+    def __init__(self) -> None:
         self.server_name = "mock_server"
-        self.key = signedjson.key.generate_signing_key(0)
+        self.key = signedjson.key.generate_signing_key("0")

-    def get_verify_keys(self):
+    def get_verify_keys(self) -> Dict[str, str]:
         vk = signedjson.key.get_verify_key(self.key)
         return {"%s:%s" % (vk.alg, vk.version): encode_verify_key_base64(vk)}

-    def get_signed_key(self, server_name, verify_key):
+    def get_signed_key(self, server_name: str, verify_key: VerifyKey) -> JsonDict:
         key_id = "%s:%s" % (verify_key.alg, verify_key.version)
         res = {
             "server_name": server_name,
@@ -64,34 +69,36 @@ class MockPerspectiveServer:
         self.sign_response(res)
         return res

-    def sign_response(self, res):
+    def sign_response(self, res: JsonDict) -> None:
         signedjson.sign.sign_json(res, self.server_name, self.key)


-@attr.s(slots=True)
+@attr.s(slots=True, auto_attribs=True)
 class FakeRequest:
-    id = attr.ib()
+    id: str


 @logcontext_clean
 class KeyringTestCase(unittest.HomeserverTestCase):
-    def check_context(self, val, expected):
+    def check_context(
+        self, val: ContextRequest, expected: Optional[ContextRequest]
+    ) -> ContextRequest:
         self.assertEqual(getattr(current_context(), "request", None), expected)
         return val

-    def test_verify_json_objects_for_server_awaits_previous_requests(self):
+    def test_verify_json_objects_for_server_awaits_previous_requests(self) -> None:
         mock_fetcher = Mock()
         mock_fetcher.get_keys = Mock()
         kr = keyring.Keyring(self.hs, key_fetchers=(mock_fetcher,))

         # a signed object that we are going to try to validate
-        key1 = signedjson.key.generate_signing_key(1)
-        json1 = {}
+        key1 = signedjson.key.generate_signing_key("1")
+        json1: JsonDict = {}
         signedjson.sign.sign_json(json1, "server10", key1)

         # start off a first set of lookups. We make the mock fetcher block until this
         # deferred completes.
-        first_lookup_deferred = Deferred()
+        first_lookup_deferred: "Deferred[None]" = Deferred()

         async def first_lookup_fetch(
             server_name: str, key_ids: List[str], minimum_valid_until_ts: int
@@ -106,8 +113,10 @@ class KeyringTestCase(unittest.HomeserverTestCase):

         mock_fetcher.get_keys.side_effect = first_lookup_fetch

-        async def first_lookup():
-            with LoggingContext("context_11", request=FakeRequest("context_11")):
+        async def first_lookup() -> None:
+            with LoggingContext(
+                "context_11", request=cast(ContextRequest, FakeRequest("context_11"))
+            ):
                 res_deferreds = kr.verify_json_objects_for_server(
                     [("server10", json1, 0), ("server11", {}, 0)]
                 )
@@ -144,8 +153,10 @@ class KeyringTestCase(unittest.HomeserverTestCase):
         mock_fetcher.get_keys.side_effect = second_lookup_fetch
         second_lookup_state = [0]

-        async def second_lookup():
-            with LoggingContext("context_12", request=FakeRequest("context_12")):
+        async def second_lookup() -> None:
+            with LoggingContext(
+                "context_12", request=cast(ContextRequest, FakeRequest("context_12"))
+            ):
                 res_deferreds_2 = kr.verify_json_objects_for_server(
                     [
                         (
@@ -175,10 +186,10 @@ class KeyringTestCase(unittest.HomeserverTestCase):
         self.get_success(d0)
         self.get_success(d2)

-    def test_verify_json_for_server(self):
+    def test_verify_json_for_server(self) -> None:
         kr = keyring.Keyring(self.hs)

-        key1 = signedjson.key.generate_signing_key(1)
+        key1 = signedjson.key.generate_signing_key("1")
         r = self.hs.get_datastores().main.store_server_verify_keys(
             "server9",
             time.time() * 1000,
@@ -186,7 +197,7 @@ class KeyringTestCase(unittest.HomeserverTestCase):
         )
         self.get_success(r)

-        json1 = {}
+        json1: JsonDict = {}
         signedjson.sign.sign_json(json1, "server9", key1)

         # should fail immediately on an unsigned object
@@ -198,12 +209,12 @@ class KeyringTestCase(unittest.HomeserverTestCase):
         # self.assertFalse(d.called)
         self.get_success(d)

-    def test_verify_for_local_server(self):
+    def test_verify_for_local_server(self) -> None:
         """Ensure that locally signed JSON can be verified without fetching keys
         over federation
         """
         kr = keyring.Keyring(self.hs)

-        json1 = {}
+        json1: JsonDict = {}
         signedjson.sign.sign_json(json1, self.hs.hostname, self.hs.signing_key)

         # Test that verify_json_for_server succeeds on an object signed by ourselves
@@ -216,22 +227,24 @@ class KeyringTestCase(unittest.HomeserverTestCase):
         {
             "old_signing_keys": {
                 f"{OLD_KEY.alg}:{OLD_KEY.version}": {
-                    "key": encode_verify_key_base64(OLD_KEY.verify_key),
+                    "key": encode_verify_key_base64(
+                        signedjson.key.get_verify_key(OLD_KEY)
+                    ),
                     "expired_ts": 1000,
                 }
             }
         }
     )
-    def test_verify_for_local_server_old_key(self):
+    def test_verify_for_local_server_old_key(self) -> None:
         """Can also use keys in old_signing_keys for verification"""
-        json1 = {}
+        json1: JsonDict = {}
         signedjson.sign.sign_json(json1, self.hs.hostname, self.OLD_KEY)

         kr = keyring.Keyring(self.hs)
         d = kr.verify_json_for_server(self.hs.hostname, json1, 0)
         self.get_success(d)

-    def test_verify_for_local_server_unknown_key(self):
+    def test_verify_for_local_server_unknown_key(self) -> None:
         """Local keys that we no longer have should be fetched via the fetcher"""

         # the key we'll sign things with (nb, not known to the Keyring)
@@ -253,14 +266,14 @@ class KeyringTestCase(unittest.HomeserverTestCase):
         )

         # sign the json
-        json1 = {}
+        json1: JsonDict = {}
         signedjson.sign.sign_json(json1, self.hs.hostname, key2)

         # ... and check we can verify it.
         d = kr.verify_json_for_server(self.hs.hostname, json1, 0)
         self.get_success(d)

-    def test_verify_json_for_server_with_null_valid_until_ms(self):
+    def test_verify_json_for_server_with_null_valid_until_ms(self) -> None:
         """Tests that we correctly handle key requests for keys we've stored
         with a null `ts_valid_until_ms`
         """
@@ -271,15 +284,18 @@ class KeyringTestCase(unittest.HomeserverTestCase):
             self.hs, key_fetchers=(StoreKeyFetcher(self.hs), mock_fetcher)
         )

-        key1 = signedjson.key.generate_signing_key(1)
+        key1 = signedjson.key.generate_signing_key("1")
         r = self.hs.get_datastores().main.store_server_verify_keys(
             "server9",
             time.time() * 1000,
-            [("server9", get_key_id(key1), FetchKeyResult(get_verify_key(key1), None))],
+            # None is not a valid value in FetchKeyResult, but we're abusing this
+            # API to insert null values into the database. The nulls get converted
+            # to 0 when fetched in KeyStore.get_server_verify_keys.
+            [("server9", get_key_id(key1), FetchKeyResult(get_verify_key(key1), None))],  # type: ignore[arg-type]
         )
         self.get_success(r)

-        json1 = {}
+        json1: JsonDict = {}
         signedjson.sign.sign_json(json1, "server9", key1)

         # should fail immediately on an unsigned object
@@ -304,9 +320,9 @@ class KeyringTestCase(unittest.HomeserverTestCase):
         )
         self.get_success(d)

-    def test_verify_json_dedupes_key_requests(self):
+    def test_verify_json_dedupes_key_requests(self) -> None:
         """Two requests for the same key should be deduped."""
-        key1 = signedjson.key.generate_signing_key(1)
+        key1 = signedjson.key.generate_signing_key("1")

         async def get_keys(
             server_name: str, key_ids: List[str], minimum_valid_until_ts: int
@@ -322,7 +338,7 @@ class KeyringTestCase(unittest.HomeserverTestCase):
         mock_fetcher.get_keys = Mock(side_effect=get_keys)
         kr = keyring.Keyring(self.hs, key_fetchers=(mock_fetcher,))

-        json1 = {}
+        json1: JsonDict = {}
         signedjson.sign.sign_json(json1, "server1", key1)

         # the first request should succeed; the second should fail because the key
@@ -346,9 +362,9 @@ class KeyringTestCase(unittest.HomeserverTestCase):
         # there should have been a single call to the fetcher
         mock_fetcher.get_keys.assert_called_once()

-    def test_verify_json_falls_back_to_other_fetchers(self):
+    def test_verify_json_falls_back_to_other_fetchers(self) -> None:
         """If the first fetcher cannot provide a recent enough key, we fall back"""
-        key1 = signedjson.key.generate_signing_key(1)
+        key1 = signedjson.key.generate_signing_key("1")

         async def get_keys1(
             server_name: str, key_ids: List[str], minimum_valid_until_ts: int
@@ -372,7 +388,7 @@ class KeyringTestCase(unittest.HomeserverTestCase):
         mock_fetcher2.get_keys = Mock(side_effect=get_keys2)
         kr = keyring.Keyring(self.hs, key_fetchers=(mock_fetcher1, mock_fetcher2))

-        json1 = {}
+        json1: JsonDict = {}
         signedjson.sign.sign_json(json1, "server1", key1)

         results = kr.verify_json_objects_for_server(
@@ -402,12 +418,12 @@ class KeyringTestCase(unittest.HomeserverTestCase):

 @logcontext_clean
 class ServerKeyFetcherTestCase(unittest.HomeserverTestCase):
-    def make_homeserver(self, reactor, clock):
+    def make_homeserver(self, reactor: MemoryReactor, clock: Clock) -> HomeServer:
         self.http_client = Mock()
         hs = self.setup_test_homeserver(federation_http_client=self.http_client)
         return hs

-    def test_get_keys_from_server(self):
+    def test_get_keys_from_server(self) -> None:
         # arbitrarily advance the clock a bit
         self.reactor.advance(100)

@@ -431,7 +447,7 @@ class ServerKeyFetcherTestCase(unittest.HomeserverTestCase):
         }
         signedjson.sign.sign_json(response, SERVER_NAME, testkey)

-        async def get_json(destination, path, **kwargs):
+        async def get_json(destination: str, path: str, **kwargs: Any) -> JsonDict:
             self.assertEqual(destination, SERVER_NAME)
             self.assertEqual(path, "/_matrix/key/v2/server")
             return response
@@ -471,7 +487,7 @@ class ServerKeyFetcherTestCase(unittest.HomeserverTestCase):


 class PerspectivesKeyFetcherTestCase(unittest.HomeserverTestCase):
-    def make_homeserver(self, reactor, clock):
+    def make_homeserver(self, reactor: MemoryReactor, clock: Clock) -> HomeServer:
         self.mock_perspective_server = MockPerspectiveServer()
         self.http_client = Mock()

@@ -522,7 +538,9 @@ class PerspectivesKeyFetcherTestCase(unittest.HomeserverTestCase):
         Tell the mock http client to expect a perspectives-server key query
         """

-        async def post_json(destination, path, data, **kwargs):
+        async def post_json(
+            destination: str, path: str, data: JsonDict, **kwargs: Any
+        ) -> JsonDict:
             self.assertEqual(destination, self.mock_perspective_server.server_name)
             self.assertEqual(path, "/_matrix/key/v2/query")

@@ -533,7 +551,7 @@ class PerspectivesKeyFetcherTestCase(unittest.HomeserverTestCase):

         self.http_client.post_json.side_effect = post_json

-    def test_get_keys_from_perspectives(self):
+    def test_get_keys_from_perspectives(self) -> None:
         # arbitrarily advance the clock a bit
         self.reactor.advance(100)

@@ -578,7 +596,7 @@ class PerspectivesKeyFetcherTestCase(unittest.HomeserverTestCase):
             bytes(res["key_json"]), canonicaljson.encode_canonical_json(response)
         )

-    def test_get_multiple_keys_from_perspectives(self):
+    def test_get_multiple_keys_from_perspectives(self) -> None:
         """Check that we can correctly request multiple keys for the same server"""

         fetcher = PerspectivesKeyFetcher(self.hs)
@@ -606,7 +624,9 @@ class PerspectivesKeyFetcherTestCase(unittest.HomeserverTestCase):
             VALID_UNTIL_TS,
         )

-        async def post_json(destination, path, data, **kwargs):
+        async def post_json(
+            destination: str, path: str, data: JsonDict, **kwargs: str
+        ) -> JsonDict:
             self.assertEqual(destination, self.mock_perspective_server.server_name)
             self.assertEqual(path, "/_matrix/key/v2/query")

@@ -648,7 +668,7 @@ class PerspectivesKeyFetcherTestCase(unittest.HomeserverTestCase):
         # finally, ensure that only one request was sent
         self.assertEqual(self.http_client.post_json.call_count, 1)

-    def test_get_perspectives_own_key(self):
+    def test_get_perspectives_own_key(self) -> None:
         """Check that we can get the perspectives server's own keys

         This is slightly complicated by the fact that the perspectives server may
@@ -697,7 +717,7 @@ class PerspectivesKeyFetcherTestCase(unittest.HomeserverTestCase):
             bytes(res["key_json"]), canonicaljson.encode_canonical_json(response)
         )

-    def test_invalid_perspectives_responses(self):
+    def test_invalid_perspectives_responses(self) -> None:
         """Check that invalid responses from the perspectives server are rejected"""
         # arbitrarily advance the clock a bit
         self.reactor.advance(100)
@@ -708,12 +728,12 @@ class PerspectivesKeyFetcherTestCase(unittest.HomeserverTestCase):
         testverifykey_id = "ed25519:ver1"
         VALID_UNTIL_TS = 200 * 1000

-        def build_response():
+        def build_response() -> dict:
             return self.build_perspectives_response(
                 SERVER_NAME, testkey, VALID_UNTIL_TS
             )

-        def get_key_from_perspectives(response):
+        def get_key_from_perspectives(response: JsonDict) -> Dict[str, FetchKeyResult]:
             fetcher = PerspectivesKeyFetcher(self.hs)
             self.expect_outgoing_key_query(SERVER_NAME, "key1", response)
             return self.get_success(fetcher.get_keys(SERVER_NAME, ["key1"], 0))
@@ -737,6 +757,6 @@ class PerspectivesKeyFetcherTestCase(unittest.HomeserverTestCase):
         self.assertEqual(keys, {}, "Expected empty dict with missing origin server sig")


-def get_key_id(key):
+def get_key_id(key: SigningKey) -> str:
     """Get the matrix ID tag for a given SigningKey or VerifyKey"""
     return "%s:%s" % (key.alg, key.version)
diff --git a/tests/handlers/test_oidc.py b/tests/handlers/test_oidc.py
index 49a1842b5c..adddbd002f 100644
--- a/tests/handlers/test_oidc.py
+++ b/tests/handlers/test_oidc.py
@@ -396,6 +396,7 @@ class OidcHandlerTestCase(HomeserverTestCase):
         self.assertEqual(params["client_id"], [CLIENT_ID])
         self.assertEqual(len(params["state"]), 1)
         self.assertEqual(len(params["nonce"]), 1)
+        self.assertNotIn("code_challenge", params)

         # Check what is in the cookies
         self.assertEqual(len(req.cookies), 2)  # two cookies
@@ -411,13 +412,118 @@ class OidcHandlerTestCase(HomeserverTestCase):
         macaroon = pymacaroons.Macaroon.deserialize(cookie)
         state = get_value_from_macaroon(macaroon, "state")
         nonce = get_value_from_macaroon(macaroon, "nonce")
+        code_verifier = get_value_from_macaroon(macaroon, "code_verifier")
         redirect = get_value_from_macaroon(macaroon, "client_redirect_url")

         self.assertEqual(params["state"], [state])
         self.assertEqual(params["nonce"], [nonce])
+        self.assertEqual(code_verifier, "")
         self.assertEqual(redirect, "http://client/redirect")

     @override_config({"oidc_config": DEFAULT_CONFIG})
+    def test_redirect_request_with_code_challenge(self) -> None:
+        """A code_challenge is sent when the provider advertises PKCE support."""
+        req = Mock(spec=["cookies"])
+        req.cookies = []
+
+        with self.metadata_edit({"code_challenge_methods_supported": ["S256"]}):
+            url = urlparse(
+                self.get_success(
+                    self.provider.handle_redirect_request(
+                        req, b"http://client/redirect"
+                    )
+                )
+            )
+
+        # Ensure the code_challenge param is added to the redirect.
+        params = parse_qs(url.query)
+        self.assertEqual(len(params["code_challenge"]), 1)
+
+        # Check what is in the cookies
+        self.assertEqual(len(req.cookies), 2)  # two cookies
+        cookie_header = req.cookies[0]
+
+        # The cookie name and path don't really matter, just that it has to be coherent
+        # between the callback & redirect handlers.
+        parts = [p.strip() for p in cookie_header.split(b";")]
+        self.assertIn(b"Path=/_synapse/client/oidc", parts)
+        name, cookie = parts[0].split(b"=")
+        self.assertEqual(name, b"oidc_session")
+
+        # Ensure the code_verifier is set in the cookie.
+        macaroon = pymacaroons.Macaroon.deserialize(cookie)
+        code_verifier = get_value_from_macaroon(macaroon, "code_verifier")
+        self.assertNotEqual(code_verifier, "")
+
+    @override_config({"oidc_config": {**DEFAULT_CONFIG, "pkce_method": "always"}})
+    def test_redirect_request_with_forced_code_challenge(self) -> None:
+        """A code_challenge is sent when pkce_method is "always", even without provider support."""
+        req = Mock(spec=["cookies"])
+        req.cookies = []
+
+        url = urlparse(
+            self.get_success(
+                self.provider.handle_redirect_request(req, b"http://client/redirect")
+            )
+        )
+
+        # Ensure the code_challenge param is added to the redirect.
+        params = parse_qs(url.query)
+        self.assertEqual(len(params["code_challenge"]), 1)
+
+        # Check what is in the cookies
+        self.assertEqual(len(req.cookies), 2)  # two cookies
+        cookie_header = req.cookies[0]
+
+        # The cookie name and path don't really matter, just that it has to be coherent
+        # between the callback & redirect handlers.
+        parts = [p.strip() for p in cookie_header.split(b";")]
+        self.assertIn(b"Path=/_synapse/client/oidc", parts)
+        name, cookie = parts[0].split(b"=")
+        self.assertEqual(name, b"oidc_session")
+
+        # Ensure the code_verifier is set in the cookie.
+        macaroon = pymacaroons.Macaroon.deserialize(cookie)
+        code_verifier = get_value_from_macaroon(macaroon, "code_verifier")
+        self.assertNotEqual(code_verifier, "")
+
+    @override_config({"oidc_config": {**DEFAULT_CONFIG, "pkce_method": "never"}})
+    def test_redirect_request_with_disabled_code_challenge(self) -> None:
+        """No code_challenge is sent when pkce_method is "never", even if the provider supports PKCE."""
+        req = Mock(spec=["cookies"])
+        req.cookies = []
+
+        # The provider's metadata advertises PKCE support, but pkce_method is "never".
+        with self.metadata_edit({"code_challenge_methods_supported": ["S256"]}):
+            url = urlparse(
+                self.get_success(
+                    self.provider.handle_redirect_request(
+                        req, b"http://client/redirect"
+                    )
+                )
+            )
+
+        # Ensure the code_challenge param is not added to the redirect.
+        params = parse_qs(url.query)
+        self.assertNotIn("code_challenge", params)
+
+        # Check what is in the cookies
+        self.assertEqual(len(req.cookies), 2)  # two cookies
+        cookie_header = req.cookies[0]
+
+        # The cookie name and path don't really matter, just that it has to be coherent
+        # between the callback & redirect handlers.
+        parts = [p.strip() for p in cookie_header.split(b";")]
+        self.assertIn(b"Path=/_synapse/client/oidc", parts)
+        name, cookie = parts[0].split(b"=")
+        self.assertEqual(name, b"oidc_session")
+
+        # Ensure the code_verifier is blank in the cookie.
+        macaroon = pymacaroons.Macaroon.deserialize(cookie)
+        code_verifier = get_value_from_macaroon(macaroon, "code_verifier")
+        self.assertEqual(code_verifier, "")
+
+    @override_config({"oidc_config": DEFAULT_CONFIG})
     def test_callback_error(self) -> None:
         """Errors from the provider returned in the callback are displayed."""
         request = Mock(args={})
@@ -601,7 +707,7 @@ class OidcHandlerTestCase(HomeserverTestCase):
             payload=token
         )
         code = "code"
-        ret = self.get_success(self.provider._exchange_code(code))
+        ret = self.get_success(self.provider._exchange_code(code, code_verifier=""))
         kwargs = self.fake_server.request.call_args[1]

         self.assertEqual(ret, token)
@@ -615,13 +721,34 @@ class OidcHandlerTestCase(HomeserverTestCase):
         self.assertEqual(args["client_secret"], [CLIENT_SECRET])
         self.assertEqual(args["redirect_uri"], [CALLBACK_URL])

+        # Test providing a code verifier.
+        code_verifier = "code_verifier"
+        ret = self.get_success(
+            self.provider._exchange_code(code, code_verifier=code_verifier)
+        )
+        kwargs = self.fake_server.request.call_args[1]
+
+        self.assertEqual(ret, token)
+        self.assertEqual(kwargs["method"], "POST")
+        self.assertEqual(kwargs["uri"], self.fake_server.token_endpoint)
+
+        args = parse_qs(kwargs["data"].decode("utf-8"))
+        self.assertEqual(args["grant_type"], ["authorization_code"])
+        self.assertEqual(args["code"], [code])
+        self.assertEqual(args["client_id"], [CLIENT_ID])
+        self.assertEqual(args["client_secret"], [CLIENT_SECRET])
+        self.assertEqual(args["redirect_uri"], [CALLBACK_URL])
+        self.assertEqual(args["code_verifier"], [code_verifier])
+
         # Test error handling
         self.fake_server.post_token_handler.return_value = FakeResponse.json(
             code=400, payload={"error": "foo", "error_description": "bar"}
         )
         from synapse.handlers.oidc import OidcError

-        exc = self.get_failure(self.provider._exchange_code(code), OidcError)
+        exc = self.get_failure(
+            self.provider._exchange_code(code, code_verifier=""), OidcError
+        )
         self.assertEqual(exc.value.error, "foo")
         self.assertEqual(exc.value.error_description, "bar")

@@ -629,7 +756,9 @@ class OidcHandlerTestCase(HomeserverTestCase):
         self.fake_server.post_token_handler.return_value = FakeResponse(
             code=500, body=b"Not JSON"
         )
-        exc = self.get_failure(self.provider._exchange_code(code), OidcError)
+        exc = self.get_failure(
+            self.provider._exchange_code(code, code_verifier=""), OidcError
+        )
         self.assertEqual(exc.value.error, "server_error")

         # Internal server error with JSON body
@@ -637,21 +766,27 @@ class OidcHandlerTestCase(HomeserverTestCase):
             code=500, payload={"error": "internal_server_error"}
         )

-        exc = self.get_failure(self.provider._exchange_code(code), OidcError)
+        exc = self.get_failure(
+            self.provider._exchange_code(code, code_verifier=""), OidcError
+        )
         self.assertEqual(exc.value.error, "internal_server_error")

         # 4xx error without "error" field
         self.fake_server.post_token_handler.return_value = FakeResponse.json(
             code=400, payload={}
         )
-        exc = self.get_failure(self.provider._exchange_code(code), OidcError)
+        exc = self.get_failure(
+            self.provider._exchange_code(code, code_verifier=""), OidcError
+        )
         self.assertEqual(exc.value.error, "server_error")

         # 2xx error with "error" field
         self.fake_server.post_token_handler.return_value = FakeResponse.json(
             code=200, payload={"error": "some_error"}
         )
-        exc = self.get_failure(self.provider._exchange_code(code), OidcError)
+        exc = self.get_failure(
+            self.provider._exchange_code(code, code_verifier=""), OidcError
+        )
         self.assertEqual(exc.value.error, "some_error")

     @override_config(
@@ -688,7 +823,7 @@ class OidcHandlerTestCase(HomeserverTestCase):
         # timestamps.
         self.reactor.advance(1000)
         start_time = self.reactor.seconds()
-        ret = self.get_success(self.provider._exchange_code(code))
+        ret = self.get_success(self.provider._exchange_code(code, code_verifier=""))

         self.assertEqual(ret, token)

@@ -739,7 +874,7 @@ class OidcHandlerTestCase(HomeserverTestCase):
             payload=token
         )
         code = "code"
-        ret = self.get_success(self.provider._exchange_code(code))
+        ret = self.get_success(self.provider._exchange_code(code, code_verifier=""))

         self.assertEqual(ret, token)

@@ -1203,6 +1338,7 @@ class OidcHandlerTestCase(HomeserverTestCase):
                 nonce=nonce,
                 client_redirect_url=client_redirect_url,
                 ui_auth_session_id=ui_auth_session_id,
+                code_verifier="",
             ),
         )

diff --git a/tests/metrics/test_metrics.py b/tests/metrics/test_metrics.py
index bddc4228bc..7c3656d049 100644
--- a/tests/metrics/test_metrics.py
+++ b/tests/metrics/test_metrics.py
@@ -12,6 +12,8 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
+from typing import Dict, Tuple
+
 from typing_extensions import Protocol

 try:
@@ -22,6 +24,7 @@ except ImportError:
 from unittest.mock import patch

 from pkg_resources import parse_version
+from prometheus_client.core import Sample

 from synapse.app._base import _set_prometheus_client_use_created_metrics
 from synapse.metrics import REGISTRY, InFlightGauge, generate_latest
@@ -30,7 +33,7 @@ from synapse.util.caches.deferred_cache import DeferredCache
 from tests import unittest


-def get_sample_labels_value(sample):
+def get_sample_labels_value(sample: Sample) -> Tuple[Dict[str, str], float]:
     """Extract the labels and values of a sample.

     prometheus_client 0.5 changed the sample type to a named tuple with more
@@ -48,12 +51,15 @@ def get_sample_labels_value(sample):
         return sample.labels, sample.value
     # Otherwise fall back to treating it as a plain 3-tuple.
     else:
-        _, labels, value = sample
+        # In older versions of prometheus_client Sample was a 3-tuple.
+        labels: Dict[str, str]
+        value: float
+        _, labels, value = sample  # type: ignore[misc]
         return labels, value


 class TestMauLimit(unittest.TestCase):
-    def test_basic(self):
+    def test_basic(self) -> None:
         class MetricEntry(Protocol):
             foo: int
             bar: int
@@ -62,11 +68,11 @@ class TestMauLimit(unittest.TestCase):
             "test1", "", labels=["test_label"], sub_metrics=["foo", "bar"]
         )

-        def handle1(metrics):
+        def handle1(metrics: MetricEntry) -> None:
             metrics.foo += 2
             metrics.bar = max(metrics.bar, 5)

-        def handle2(metrics):
+        def handle2(metrics: MetricEntry) -> None:
             metrics.foo += 3
             metrics.bar = max(metrics.bar, 7)

@@ -116,7 +122,9 @@ class TestMauLimit(unittest.TestCase):
             self.get_metrics_from_gauge(gauge),
         )

-    def get_metrics_from_gauge(self, gauge):
+    def get_metrics_from_gauge(
+        self, gauge: InFlightGauge
+    ) -> Dict[str, Dict[Tuple[str, ...], float]]:
         results = {}

         for r in gauge.collect():
@@ -129,7 +137,7 @@ class TestMauLimit(unittest.TestCase):


 class BuildInfoTests(unittest.TestCase):
-    def test_get_build(self):
+    def test_get_build(self) -> None:
         """
         The synapse_build_info metric reports the OS version, Python version,
         and Synapse version.
@@ -147,7 +155,7 @@ class BuildInfoTests(unittest.TestCase):


 class CacheMetricsTests(unittest.HomeserverTestCase):
-    def test_cache_metric(self):
+    def test_cache_metric(self) -> None:
         """
         Caches produce metrics reflecting their state when scraped.
         """
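For context: `get_sample_labels_value` above papers over the two historical `Sample` shapes. With prometheus_client >= 0.5 (assumed here), samples are named tuples and can be read directly:

```python
from prometheus_client import CollectorRegistry, Counter

registry = CollectorRegistry()
requests = Counter(
    "example_requests", "An example counter", ["method"], registry=registry
)
requests.labels(method="GET").inc()

for metric in registry.collect():
    for sample in metric.samples:
        # Named-tuple access; older prometheus_client exposed plain 3-tuples.
        print(sample.name, sample.labels, sample.value)
```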
diff --git a/tests/rest/client/test_account.py b/tests/rest/client/test_account.py
index c1a7fb2f8a..88f255c9ee 100644
--- a/tests/rest/client/test_account.py
+++ b/tests/rest/client/test_account.py
@@ -690,41 +690,21 @@ class ThreepidEmailRestTestCase(unittest.HomeserverTestCase):
         self.hs.config.registration.enable_3pid_changes = False

         client_secret = "foobar"
-        session_id = self._request_token(self.email, client_secret)
-
-        self.assertEqual(len(self.email_attempts), 1)
-        link = self._get_link_from_email()
-
-        self._validate_token(link)
-
         channel = self.make_request(
             "POST",
-            b"/_matrix/client/unstable/account/3pid/add",
+            b"/_matrix/client/unstable/account/3pid/email/requestToken",
             {
                 "client_secret": client_secret,
-                "sid": session_id,
-                "auth": {
-                    "type": "m.login.password",
-                    "user": self.user_id,
-                    "password": "test",
-                },
+                "email": "test@example.com",
+                "send_attempt": 1,
             },
-            access_token=self.user_id_tok,
         )
+
         self.assertEqual(
             HTTPStatus.BAD_REQUEST, channel.code, msg=channel.result["body"]
         )
-        self.assertEqual(Codes.FORBIDDEN, channel.json_body["errcode"])

-        # Get user
-        channel = self.make_request(
-            "GET",
-            self.url_3pid,
-            access_token=self.user_id_tok,
-        )
-
-        self.assertEqual(HTTPStatus.OK, channel.code, msg=channel.result["body"])
-        self.assertFalse(channel.json_body["threepids"])
+        self.assertEqual(Codes.FORBIDDEN, channel.json_body["errcode"])

     def test_delete_email(self) -> None:
         """Test deleting an email from profile"""
diff --git a/tests/rest/media/v1/test_oembed.py b/tests/rest/media/v1/test_oembed.py
index 319ae8b1cc..3f7f1dbab9 100644
--- a/tests/rest/media/v1/test_oembed.py
+++ b/tests/rest/media/v1/test_oembed.py
@@ -150,3 +150,13 @@ class OEmbedTests(HomeserverTestCase):
         result = self.parse_response({"type": "link"})
         self.assertIn("og:type", result.open_graph_result)
         self.assertEqual(result.open_graph_result["og:type"], "website")
+
+    def test_title_html_entities(self) -> None:
+        """Test that HTML entities in an oEmbed title are decoded."""
+        result = self.parse_response(
+            {"title": "Why JSON isn&#8217;t a Good Configuration Language"}
+        )
+        self.assertEqual(
+            result.open_graph_result["og:title"],
+            "Why JSON isn’t a Good Configuration Language",
+        )
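For context: the new test expects numeric HTML entities in oEmbed titles to be decoded. The standard library does exactly this via `html.unescape`:

```python
import html

title = "Why JSON isn&#8217;t a Good Configuration Language"
# &#8217; is the numeric entity for the typographic apostrophe (U+2019).
print(html.unescape(title))  # Why JSON isn’t a Good Configuration Language
```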
diff --git a/tests/util/test_macaroons.py b/tests/util/test_macaroons.py
index f68377a05a..e56ec2c860 100644
--- a/tests/util/test_macaroons.py
+++ b/tests/util/test_macaroons.py
@@ -92,6 +92,7 @@ class MacaroonGeneratorTestCase(TestCase):
             nonce="nonce",
             client_redirect_url="https://example.com/",
             ui_auth_session_id="",
+            code_verifier="",
         )
         token = self.macaroon_generator.generate_oidc_session_token(
             state, session_data, duration_in_ms=2 * 60 * 1000