89 files changed, 1511 insertions, 915 deletions
diff --git a/.github/workflows/latest_deps.yml b/.github/workflows/latest_deps.yml
index 4bc4266c4d..e5e4e8da77 100644
--- a/.github/workflows/latest_deps.yml
+++ b/.github/workflows/latest_deps.yml
@@ -208,7 +208,7 @@ jobs:
     steps:
       - uses: actions/checkout@v3
-      - uses: JasonEtco/create-an-issue@77399b6110ef82b94c1c9f9f615acf9e604f7f56 # v2.5.0, 2020-12-06
+      - uses: JasonEtco/create-an-issue@3a8ba796516b57db8cb2ee6dfc65bc76cd39d56d # v2.8.2
         env:
           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
         with:
diff --git a/.github/workflows/twisted_trunk.yml b/.github/workflows/twisted_trunk.yml
index 262b17a20d..b08222f289 100644
--- a/.github/workflows/twisted_trunk.yml
+++ b/.github/workflows/twisted_trunk.yml
@@ -174,7 +174,7 @@ jobs:
     steps:
       - uses: actions/checkout@v3
-      - uses: JasonEtco/create-an-issue@77399b6110ef82b94c1c9f9f615acf9e604f7f56 # v2.5.0, 2020-12-06
+      - uses: JasonEtco/create-an-issue@3a8ba796516b57db8cb2ee6dfc65bc76cd39d56d # v2.8.2
         env:
           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
         with:
diff --git a/.gitignore b/.gitignore
index 15fbfdddf1..2b09bddf18 100644
--- a/.gitignore
+++ b/.gitignore
@@ -36,6 +36,7 @@ __pycache__/

 # For direnv users
 /.envrc
+.direnv/

 # IDEs
 /.idea/
diff --git a/Cargo.lock b/Cargo.lock
index 6e97fb8fb1..c249ec56f7 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -13,9 +13,9 @@ dependencies = [

 [[package]]
 name = "anyhow"
-version = "1.0.66"
+version = "1.0.68"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "216261ddc8289130e551ddcd5ce8a064710c0d064a4d2895c67151c92b5443f6"
+checksum = "2cb2f989d18dd141ab8ae82f64d1a8cdd37e0840f73a406896cf5e99502fab61"

 [[package]]
 name = "arc-swap"
@@ -37,9 +37,9 @@ checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a"

 [[package]]
 name = "blake2"
-version = "0.10.5"
+version = "0.10.6"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b12e5fd123190ce1c2e559308a94c9bacad77907d4c6005d9e58fe1a0689e55e"
+checksum = "46502ad458c9a52b69d4d4d32775c788b7a1b85e8bc9d482d92250fc0e3f8efe"
 dependencies = [
  "digest",
 ]
@@ -323,18 +323,18 @@ checksum = "d29ab0c6d3fc0ee92fe66e2d99f700eab17a8d57d1c1d3b748380fb20baa78cd"

 [[package]]
 name = "serde"
-version = "1.0.150"
+version = "1.0.151"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e326c9ec8042f1b5da33252c8a37e9ffbd2c9bef0155215b6e6c80c790e05f91"
+checksum = "97fed41fc1a24994d044e6db6935e69511a1153b52c15eb42493b26fa87feba0"
 dependencies = [
  "serde_derive",
 ]

 [[package]]
 name = "serde_derive"
-version = "1.0.150"
+version = "1.0.151"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "42a3df25b0713732468deadad63ab9da1f1fd75a48a15024b50363f128db627e"
+checksum = "255abe9a125a985c05190d687b320c12f9b1f0b99445e608c21ba0782c719ad8"
 dependencies = [
  "proc-macro2",
  "quote",
@@ -343,9 +343,9 @@ dependencies = [

 [[package]]
 name = "serde_json"
-version = "1.0.89"
+version = "1.0.91"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "020ff22c755c2ed3f8cf162dbb41a7268d934702f3ed3631656ea597e08fc3db"
+checksum = "877c235533714907a8c2464236f5c4b2a17262ef1bd71f38f35ea592c8da6883"
 dependencies = [
  "itoa",
  "ryu",
diff --git a/changelog.d/14263.misc b/changelog.d/14263.misc
new file mode 100644
index 0000000000..11d9446a4b
--- /dev/null
+++ b/changelog.d/14263.misc
@@ -0,0 +1 @@
+Improve performance of the `/hierarchy` endpoint.
diff --git a/changelog.d/14545.misc b/changelog.d/14545.misc
new file mode 100644
index 0000000000..60b6761a51
--- /dev/null
+++ b/changelog.d/14545.misc
@@ -0,0 +1 @@
+Faster remote room joins: stream the un-partial-stating of events over replication.
\ No newline at end of file
diff --git a/changelog.d/14546.misc b/changelog.d/14546.misc
new file mode 100644
index 0000000000..60b6761a51
--- /dev/null
+++ b/changelog.d/14546.misc
@@ -0,0 +1 @@
+Faster remote room joins: stream the un-partial-stating of events over replication.
\ No newline at end of file
diff --git a/changelog.d/14644.bugfix b/changelog.d/14644.bugfix
new file mode 100644
index 0000000000..711088bb7e
--- /dev/null
+++ b/changelog.d/14644.bugfix
@@ -0,0 +1 @@
+Fix the *MAU Limits* section of the Grafana dashboard relying on a specific `job` name for the workers of a Synapse deployment.
\ No newline at end of file
diff --git a/changelog.d/14665.misc b/changelog.d/14665.misc
new file mode 100644
index 0000000000..2b7c96143d
--- /dev/null
+++ b/changelog.d/14665.misc
@@ -0,0 +1 @@
+Change `handle_new_client_event` signature so that a 429 does not reach clients on `PartialStateConflictError`, and internally retry when needed instead.
diff --git a/changelog.d/14669.bugfix b/changelog.d/14669.bugfix
new file mode 100644
index 0000000000..bea316b065
--- /dev/null
+++ b/changelog.d/14669.bugfix
@@ -0,0 +1 @@
+Fix a bug introduced in Synapse 1.70.0 which could cause spurious `UNIQUE constraint failed` errors in the `rotate_notifs` background job.
diff --git a/changelog.d/14672.misc b/changelog.d/14672.misc
new file mode 100644
index 0000000000..b94ebed971
--- /dev/null
+++ b/changelog.d/14672.misc
@@ -0,0 +1 @@
+Remove dependency on jQuery on reCAPTCHA page.
diff --git a/changelog.d/14673.doc b/changelog.d/14673.doc
new file mode 100644
index 0000000000..7baf5f7f38
--- /dev/null
+++ b/changelog.d/14673.doc
@@ -0,0 +1 @@
+Declare support for Python 3.11.
diff --git a/changelog.d/14674.doc b/changelog.d/14674.doc
new file mode 100644
index 0000000000..df21417819
--- /dev/null
+++ b/changelog.d/14674.doc
@@ -0,0 +1 @@
+Fix `target_memory_usage` being used in the description for the actual `cache_autotune` sub-option `target_cache_memory_usage`.
diff --git a/changelog.d/14676.misc b/changelog.d/14676.misc
new file mode 100644
index 0000000000..8a41df9c64
--- /dev/null
+++ b/changelog.d/14676.misc
@@ -0,0 +1 @@
+Faster joins: make `compute_state_after_events` consistent with other state-fetching functions that take a `StateFilter`.
diff --git a/changelog.d/14680.misc b/changelog.d/14680.misc
new file mode 100644
index 0000000000..d44571b731
--- /dev/null
+++ b/changelog.d/14680.misc
@@ -0,0 +1 @@
+Add missing type hints.
diff --git a/changelog.d/14681.misc b/changelog.d/14681.misc
new file mode 100644
index 0000000000..d44571b731
--- /dev/null
+++ b/changelog.d/14681.misc
@@ -0,0 +1 @@
+Add missing type hints.
diff --git a/changelog.d/14685.misc b/changelog.d/14685.misc
new file mode 100644
index 0000000000..3ba2270100
--- /dev/null
+++ b/changelog.d/14685.misc
@@ -0,0 +1 @@
+Improve type annotations for the helper methods on a `CachedFunction`.
\ No newline at end of file
diff --git a/changelog.d/14693.misc b/changelog.d/14693.misc
new file mode 100644
index 0000000000..86771f41b2
--- /dev/null
+++ b/changelog.d/14693.misc
@@ -0,0 +1 @@
+Bump JasonEtco/create-an-issue from 2.8.1 to 2.8.2.
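#14685 above tightens the type annotations on `CachedFunction` helper methods. As a rough sketch of the underlying idea — not Synapse's actual implementation; the class body and cache key below are simplified stand-ins — a `ParamSpec` lets a caching wrapper preserve the wrapped coroutine's full signature for type checkers:

```python
from typing import Any, Awaitable, Callable, Dict, Generic, Tuple, TypeVar

from typing_extensions import ParamSpec

P = ParamSpec("P")
R = TypeVar("R")


class CachedFunction(Generic[P, R]):
    """Illustrative only: memoise an async function by its call arguments."""

    def __init__(self, fn: Callable[P, Awaitable[R]]) -> None:
        self.fn = fn
        # Hypothetical cache; assumes all call arguments are hashable.
        self._cache: Dict[Tuple[Any, ...], R] = {}

    async def __call__(self, *args: P.args, **kwargs: P.kwargs) -> R:
        key = (args, tuple(sorted(kwargs.items())))
        if key not in self._cache:
            self._cache[key] = await self.fn(*args, **kwargs)
        return self._cache[key]
```

Because the wrapper is generic over `P` and `R`, helper methods on it can be typed against the wrapped function's real parameters instead of `Any`.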
diff --git a/changelog.d/14694.misc b/changelog.d/14694.misc
new file mode 100644
index 0000000000..146238d8c5
--- /dev/null
+++ b/changelog.d/14694.misc
@@ -0,0 +1 @@
+Bump anyhow from 1.0.66 to 1.0.68.
diff --git a/changelog.d/14695.misc b/changelog.d/14695.misc
new file mode 100644
index 0000000000..57e08498be
--- /dev/null
+++ b/changelog.d/14695.misc
@@ -0,0 +1 @@
+Bump blake2 from 0.10.5 to 0.10.6.
diff --git a/changelog.d/14696.misc b/changelog.d/14696.misc
new file mode 100644
index 0000000000..9849366b9f
--- /dev/null
+++ b/changelog.d/14696.misc
@@ -0,0 +1 @@
+Bump serde_json from 1.0.89 to 1.0.91.
diff --git a/changelog.d/14697.misc b/changelog.d/14697.misc
new file mode 100644
index 0000000000..514209fcc3
--- /dev/null
+++ b/changelog.d/14697.misc
@@ -0,0 +1 @@
+Bump serde from 1.0.150 to 1.0.151.
diff --git a/changelog.d/14698.misc b/changelog.d/14698.misc
new file mode 100644
index 0000000000..2e2072183e
--- /dev/null
+++ b/changelog.d/14698.misc
@@ -0,0 +1 @@
+Bump lxml from 4.9.1 to 4.9.2.
diff --git a/changelog.d/14699.misc b/changelog.d/14699.misc
new file mode 100644
index 0000000000..d73ef25634
--- /dev/null
+++ b/changelog.d/14699.misc
@@ -0,0 +1 @@
+Bump hiredis from 2.0.0 to 2.1.0.
diff --git a/changelog.d/14700.misc b/changelog.d/14700.misc
new file mode 100644
index 0000000000..253eb1721d
--- /dev/null
+++ b/changelog.d/14700.misc
@@ -0,0 +1 @@
+Bump types-jsonschema from 4.17.0.1 to 4.17.0.2.
diff --git a/changelog.d/14701.misc b/changelog.d/14701.misc
new file mode 100644
index 0000000000..05c89d5948
--- /dev/null
+++ b/changelog.d/14701.misc
@@ -0,0 +1 @@
+Bump sentry-sdk from 1.11.1 to 1.12.0.
diff --git a/changelog.d/14702.misc b/changelog.d/14702.misc
new file mode 100644
index 0000000000..17c0485f12
--- /dev/null
+++ b/changelog.d/14702.misc
@@ -0,0 +1 @@
+Bump types-setuptools from 65.6.0.1 to 65.6.0.2.
diff --git a/changelog.d/14707.misc b/changelog.d/14707.misc
new file mode 100644
index 0000000000..38f47a6f30
--- /dev/null
+++ b/changelog.d/14707.misc
@@ -0,0 +1 @@
+Add `.direnv/` directory to .gitignore to prevent local state generated by the [direnv](https://direnv.net/) development tool from being committed.
\ No newline at end of file
diff --git a/contrib/grafana/synapse.json b/contrib/grafana/synapse.json
index 68705b6e6d..f09cd6f87c 100644
--- a/contrib/grafana/synapse.json
+++ b/contrib/grafana/synapse.json
@@ -1008,8 +1008,7 @@
         "mode": "absolute",
         "steps": [
           {
-            "color": "green",
-            "value": null
+            "color": "green"
           },
           {
             "color": "red",
@@ -1681,8 +1680,7 @@
         "mode": "absolute",
         "steps": [
           {
-            "color": "green",
-            "value": null
+            "color": "green"
           },
           {
             "color": "red",
@@ -2533,8 +2531,7 @@
         "mode": "absolute",
         "steps": [
           {
-            "color": "green",
-            "value": null
+            "color": "green"
           },
           {
             "color": "red",
@@ -11296,7 +11293,7 @@
             "uid": "$datasource"
           },
           "editorMode": "code",
-          "expr": "synapse_admin_mau_max{instance=\"$instance\", job=~\"(hhs_)?synapse\"}",
+          "expr": "max(synapse_admin_mau_max{instance=\"$instance\"})",
           "format": "time_series",
           "interval": "",
           "intervalFactor": 1,
@@ -11310,7 +11307,7 @@
             "uid": "$datasource"
           },
           "editorMode": "code",
-          "expr": "synapse_admin_mau_current{instance=\"$instance\", job=~\"(hhs_)?synapse\"}",
+          "expr": "max(synapse_admin_mau_current{instance=\"$instance\"})",
           "hide": false,
           "legendFormat": "Current",
           "range": true,
@@ -12760,6 +12757,6 @@
   "timezone": "",
   "title": "Synapse",
   "uid": "000000012",
-  "version": 149,
+  "version": 150,
   "weekStart": ""
 }
\ No newline at end of file
diff --git a/docs/setup/installation.md b/docs/setup/installation.md
index 85b2ac2580..a762ad55df 100644
--- a/docs/setup/installation.md
+++ b/docs/setup/installation.md
@@ -200,7 +200,7 @@ When following this route please make sure that the [Platform-specific prerequis
 System requirements:

 - POSIX-compliant system (tested on Linux & OS X)
-- Python 3.7 or later, up to Python 3.10.
+- Python 3.7 or later, up to Python 3.11.
 - At least 1GB of free RAM if you want to join large public rooms like #matrix:matrix.org

 If building on an uncommon architecture for which pre-built wheels are
diff --git a/docs/usage/configuration/config_documentation.md b/docs/usage/configuration/config_documentation.md
index 4d32902fea..6b8768f45d 100644
--- a/docs/usage/configuration/config_documentation.md
+++ b/docs/usage/configuration/config_documentation.md
@@ -1148,7 +1148,7 @@ number of entries that can be stored.
    * `max_cache_memory_usage` sets a ceiling on how much memory the cache can use before caches begin to be continuously evicted.
      They will continue to be evicted until the memory usage drops below the `target_memory_usage`, set in
      the setting below, or until the `min_cache_ttl` is hit. There is no default value for this option.
-   * `target_memory_usage` sets a rough target for the desired memory usage of the caches. There is no default value
+   * `target_cache_memory_usage` sets a rough target for the desired memory usage of the caches. There is no default value
      for this option.
    * `min_cache_ttl` sets a limit under which newer cache entries are not evicted and is only applied when
      caches are actively being evicted/`max_cache_memory_usage` has been exceeded. This is to protect hot caches
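The `config_documentation.md` hunk above corrects the option name and spells out how the three cache-autotuning settings interact. The decision logic it describes can be summarised as follows (an illustrative sketch only; the function and flag names are assumptions, not Synapse code):

```python
def should_evict(
    memory_usage: float,
    entry_age_ms: float,
    *,
    max_cache_memory_usage: float,
    target_cache_memory_usage: float,
    min_cache_ttl_ms: float,
    currently_evicting: bool,
) -> bool:
    """Sketch of the documented cache-autotune semantics."""
    # min_cache_ttl protects recent ("hot") entries once eviction has started.
    if entry_age_ms < min_cache_ttl_ms:
        return False
    if currently_evicting:
        # Keep evicting until memory usage falls below the rough target.
        return memory_usage > target_cache_memory_usage
    # Start evicting once the hard ceiling is exceeded.
    return memory_usage > max_cache_memory_usage
```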
diff --git a/mypy.ini b/mypy.ini
index 37acf589c9..80fbcdfeab 100644
--- a/mypy.ini
+++ b/mypy.ini
@@ -36,8 +36,6 @@ exclude = (?x)
   |tests/api/test_ratelimiting.py
   |tests/app/test_openid_listener.py
   |tests/appservice/test_scheduler.py
-  |tests/config/test_cache.py
-  |tests/config/test_tls.py
   |tests/crypto/test_keyring.py
   |tests/events/test_presence_router.py
   |tests/events/test_utils.py
@@ -89,16 +87,13 @@ disallow_untyped_defs = False
 [mypy-tests.*]
 disallow_untyped_defs = False

-[mypy-tests.config.test_api]
+[mypy-tests.config.*]
 disallow_untyped_defs = True

 [mypy-tests.federation.transport.test_client]
 disallow_untyped_defs = True

-[mypy-tests.handlers.test_sso]
-disallow_untyped_defs = True
-
-[mypy-tests.handlers.test_user_directory]
+[mypy-tests.handlers.*]
 disallow_untyped_defs = True

 [mypy-tests.metrics.test_background_process_metrics]
diff --git a/poetry.lock b/poetry.lock
index 6fd4bd5ba5..3fbad339a3 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -318,11 +318,11 @@ typing-extensions = {version = ">=3.7.4.3", markers = "python_version < \"3.8\""

 [[package]]
 name = "hiredis"
-version = "2.0.0"
+version = "2.1.0"
 description = "Python wrapper for hiredis"
 category = "main"
 optional = true
-python-versions = ">=3.6"
+python-versions = ">=3.7"

 [[package]]
 name = "hyperlink"
@@ -501,7 +501,7 @@ pyasn1 = ">=0.4.6"

 [[package]]
 name = "lxml"
-version = "4.9.1"
+version = "4.9.2"
 description = "Powerful and Pythonic XML processing library combining libxml2/libxslt with the ElementTree API."
 category = "main"
 optional = true
@@ -1070,7 +1070,7 @@ doc = ["Sphinx", "sphinx-rtd-theme"]

 [[package]]
 name = "sentry-sdk"
-version = "1.11.1"
+version = "1.12.0"
 description = "Python client for Sentry (https://sentry.io)"
 category = "main"
 optional = true
@@ -1091,6 +1091,7 @@ falcon = ["falcon (>=1.4)"]
 fastapi = ["fastapi (>=0.79.0)"]
 flask = ["blinker (>=1.1)", "flask (>=0.11)"]
 httpx = ["httpx (>=0.16.0)"]
+opentelemetry = ["opentelemetry-distro (>=0.350b0)"]
 pure-eval = ["asttokens", "executing", "pure-eval"]
 pymongo = ["pymongo (>=3.1)"]
 pyspark = ["pyspark (>=2.4.4)"]
@@ -1418,7 +1419,7 @@ python-versions = "*"

 [[package]]
 name = "types-jsonschema"
-version = "4.17.0.1"
+version = "4.17.0.2"
 description = "Typing stubs for jsonschema"
 category = "dev"
 optional = false
@@ -1480,7 +1481,7 @@ types-urllib3 = "<1.27"

 [[package]]
 name = "types-setuptools"
-version = "65.6.0.1"
+version = "65.6.0.2"
 description = "Typing stubs for setuptools"
 category = "dev"
 optional = false
@@ -1866,47 +1867,94 @@ gitpython = [
     {file = "GitPython-3.1.29.tar.gz", hash = "sha256:cc36bfc4a3f913e66805a28e84703e419d9c264c1077e537b54f0e1af85dbefd"},
 ]
 hiredis = [
-    {file = "hiredis-2.0.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:b4c8b0bc5841e578d5fb32a16e0c305359b987b850a06964bd5a62739d688048"},
-    {file = "hiredis-2.0.0-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:0adea425b764a08270820531ec2218d0508f8ae15a448568109ffcae050fee26"},
-    {file = "hiredis-2.0.0-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:3d55e36715ff06cdc0ab62f9591607c4324297b6b6ce5b58cb9928b3defe30ea"},
-    {file = "hiredis-2.0.0-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:5d2a48c80cf5a338d58aae3c16872f4d452345e18350143b3bf7216d33ba7b99"},
-    {file = "hiredis-2.0.0-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:240ce6dc19835971f38caf94b5738092cb1e641f8150a9ef9251b7825506cb05"},
-    {file = "hiredis-2.0.0-cp36-cp36m-manylinux2014_aarch64.whl", hash =
"sha256:5dc7a94bb11096bc4bffd41a3c4f2b958257085c01522aa81140c68b8bf1630a"}, - {file = "hiredis-2.0.0-cp36-cp36m-win32.whl", hash = "sha256:139705ce59d94eef2ceae9fd2ad58710b02aee91e7fa0ccb485665ca0ecbec63"}, - {file = "hiredis-2.0.0-cp36-cp36m-win_amd64.whl", hash = "sha256:c39c46d9e44447181cd502a35aad2bb178dbf1b1f86cf4db639d7b9614f837c6"}, - {file = "hiredis-2.0.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:adf4dd19d8875ac147bf926c727215a0faf21490b22c053db464e0bf0deb0485"}, - {file = "hiredis-2.0.0-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:0f41827028901814c709e744060843c77e78a3aca1e0d6875d2562372fcb405a"}, - {file = "hiredis-2.0.0-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:508999bec4422e646b05c95c598b64bdbef1edf0d2b715450a078ba21b385bcc"}, - {file = "hiredis-2.0.0-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:0d5109337e1db373a892fdcf78eb145ffb6bbd66bb51989ec36117b9f7f9b579"}, - {file = "hiredis-2.0.0-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:04026461eae67fdefa1949b7332e488224eac9e8f2b5c58c98b54d29af22093e"}, - {file = "hiredis-2.0.0-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:a00514362df15af041cc06e97aebabf2895e0a7c42c83c21894be12b84402d79"}, - {file = "hiredis-2.0.0-cp37-cp37m-win32.whl", hash = "sha256:09004096e953d7ebd508cded79f6b21e05dff5d7361771f59269425108e703bc"}, - {file = "hiredis-2.0.0-cp37-cp37m-win_amd64.whl", hash = "sha256:f8196f739092a78e4f6b1b2172679ed3343c39c61a3e9d722ce6fcf1dac2824a"}, - {file = "hiredis-2.0.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:294a6697dfa41a8cba4c365dd3715abc54d29a86a40ec6405d677ca853307cfb"}, - {file = "hiredis-2.0.0-cp38-cp38-manylinux1_i686.whl", hash = "sha256:3dddf681284fe16d047d3ad37415b2e9ccdc6c8986c8062dbe51ab9a358b50a5"}, - {file = "hiredis-2.0.0-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:dcef843f8de4e2ff5e35e96ec2a4abbdf403bd0f732ead127bd27e51f38ac298"}, - {file = "hiredis-2.0.0-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:87c7c10d186f1743a8fd6a971ab6525d60abd5d5d200f31e073cd5e94d7e7a9d"}, - {file = "hiredis-2.0.0-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:7f0055f1809b911ab347a25d786deff5e10e9cf083c3c3fd2dd04e8612e8d9db"}, - {file = "hiredis-2.0.0-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:11d119507bb54e81f375e638225a2c057dda748f2b1deef05c2b1a5d42686048"}, - {file = "hiredis-2.0.0-cp38-cp38-win32.whl", hash = "sha256:7492af15f71f75ee93d2a618ca53fea8be85e7b625e323315169977fae752426"}, - {file = "hiredis-2.0.0-cp38-cp38-win_amd64.whl", hash = "sha256:65d653df249a2f95673976e4e9dd7ce10de61cfc6e64fa7eeaa6891a9559c581"}, - {file = "hiredis-2.0.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:ae8427a5e9062ba66fc2c62fb19a72276cf12c780e8db2b0956ea909c48acff5"}, - {file = "hiredis-2.0.0-cp39-cp39-manylinux1_i686.whl", hash = "sha256:3f5f7e3a4ab824e3de1e1700f05ad76ee465f5f11f5db61c4b297ec29e692b2e"}, - {file = "hiredis-2.0.0-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:e3447d9e074abf0e3cd85aef8131e01ab93f9f0e86654db7ac8a3f73c63706ce"}, - {file = "hiredis-2.0.0-cp39-cp39-manylinux2010_i686.whl", hash = "sha256:8b42c0dc927b8d7c0eb59f97e6e34408e53bc489f9f90e66e568f329bff3e443"}, - {file = "hiredis-2.0.0-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:b84f29971f0ad4adaee391c6364e6f780d5aae7e9226d41964b26b49376071d0"}, - {file = "hiredis-2.0.0-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:0b39ec237459922c6544d071cdcf92cbb5bc6685a30e7c6d985d8a3e3a75326e"}, - {file = "hiredis-2.0.0-cp39-cp39-win32.whl", hash = 
"sha256:a7928283143a401e72a4fad43ecc85b35c27ae699cf5d54d39e1e72d97460e1d"}, - {file = "hiredis-2.0.0-cp39-cp39-win_amd64.whl", hash = "sha256:a4ee8000454ad4486fb9f28b0cab7fa1cd796fc36d639882d0b34109b5b3aec9"}, - {file = "hiredis-2.0.0-pp36-pypy36_pp73-macosx_10_9_x86_64.whl", hash = "sha256:1f03d4dadd595f7a69a75709bc81902673fa31964c75f93af74feac2f134cc54"}, - {file = "hiredis-2.0.0-pp36-pypy36_pp73-manylinux1_x86_64.whl", hash = "sha256:04927a4c651a0e9ec11c68e4427d917e44ff101f761cd3b5bc76f86aaa431d27"}, - {file = "hiredis-2.0.0-pp36-pypy36_pp73-manylinux2010_x86_64.whl", hash = "sha256:a39efc3ade8c1fb27c097fd112baf09d7fd70b8cb10ef1de4da6efbe066d381d"}, - {file = "hiredis-2.0.0-pp36-pypy36_pp73-win32.whl", hash = "sha256:07bbf9bdcb82239f319b1f09e8ef4bdfaec50ed7d7ea51a56438f39193271163"}, - {file = "hiredis-2.0.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:807b3096205c7cec861c8803a6738e33ed86c9aae76cac0e19454245a6bbbc0a"}, - {file = "hiredis-2.0.0-pp37-pypy37_pp73-manylinux1_x86_64.whl", hash = "sha256:1233e303645f468e399ec906b6b48ab7cd8391aae2d08daadbb5cad6ace4bd87"}, - {file = "hiredis-2.0.0-pp37-pypy37_pp73-manylinux2010_x86_64.whl", hash = "sha256:cb2126603091902767d96bcb74093bd8b14982f41809f85c9b96e519c7e1dc41"}, - {file = "hiredis-2.0.0-pp37-pypy37_pp73-win32.whl", hash = "sha256:f52010e0a44e3d8530437e7da38d11fb822acfb0d5b12e9cd5ba655509937ca0"}, - {file = "hiredis-2.0.0.tar.gz", hash = "sha256:81d6d8e39695f2c37954d1011c0480ef7cf444d4e3ae24bc5e89ee5de360139a"}, + {file = "hiredis-2.1.0-cp310-cp310-macosx_10_12_universal2.whl", hash = "sha256:7b339a7542a3f6a10b3bbc157e4abc9bae9628e2df7faf5f8a32f730014719ae"}, + {file = "hiredis-2.1.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:dd82370c2f9f804ec617b95d25edb0fd04882251afb2ecdf08b9ced0c3aa4bcc"}, + {file = "hiredis-2.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:92077511d3a62109d5d11bf584e41264a993ae3c77c72de63c1f741b7809bacb"}, + {file = "hiredis-2.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a6544c7807cbb75bc6ae9ab85773b4413edbcd55342e9e3d7d3f159f677f7428"}, + {file = "hiredis-2.1.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8181d73f25943fbdca904154e51b845317103cee08116cfae258f96927ce1e74"}, + {file = "hiredis-2.1.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:040f861e4e43daa9287f3a85979542f9c7ee8cfab695fa662f3b6186c6f7d5e8"}, + {file = "hiredis-2.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ef5ae8c1af82a8000742003cb16a6fa6c57919abb861ab214dcb27db8573ee64"}, + {file = "hiredis-2.1.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9b9aa1b0ec46dec5b05dcec22e50bbd4af33da121fca83bd2601dc60c79183f9"}, + {file = "hiredis-2.1.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:c53c36a630a6c6fd9dfe439f4266e564ca58995015a780c1d964567ebf328466"}, + {file = "hiredis-2.1.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:05aab35210bd7fbd7bd066efb2a42eb5c2878c2c137a9cff597204be2c07475b"}, + {file = "hiredis-2.1.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:e6097e1cef647c665f71cd0e58346389580db98365e804f7a9ad5d96e66b7150"}, + {file = "hiredis-2.1.0-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:32f98370efed38088d000df2eb2c8ed43d93d99bbf4a0a740e15eb4a887cc23f"}, + {file = "hiredis-2.1.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b85276ed57e0aee8910b48383a38a299851935ba134460bad394988c750985fe"}, 
+ {file = "hiredis-2.1.0-cp310-cp310-win32.whl", hash = "sha256:bd9d99606008a8cfa6b9e950abaa35f5b87496f03e63b73197d02b0fe7ecb6d3"}, + {file = "hiredis-2.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:6a8e796c94b7b8c63c99757d6ec2075069e4c362dfb0f130aaf874422bea3e7d"}, + {file = "hiredis-2.1.0-cp311-cp311-macosx_10_12_universal2.whl", hash = "sha256:e7bb5cab604fc45b45cee40e84e84d9e30eeb34c571a3784392ae658273bbd23"}, + {file = "hiredis-2.1.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:e0d4b074ff5ebba00933da27a06f3752b8af2448a6aa9dc895d5279f43011530"}, + {file = "hiredis-2.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f0c2dbaffd4a9e8df04731a012c8a67b7517abec7e53bb12c3cd749865c63428"}, + {file = "hiredis-2.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c19151e79b36e0d849899a21fc10539aa1903af94b31754bddab1bea876cd508"}, + {file = "hiredis-2.1.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:08ec41519a533f5cd1f1f8bd1797929358117c8e4570b679b469f768b45b7dbf"}, + {file = "hiredis-2.1.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:98f0db3667fa8abbd37ac66385b460841029033bfc1ba8d7e5b3ff1e01d3346a"}, + {file = "hiredis-2.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f592d1522b5981890b34b0b814f4bfa4a68b23ee90f538aac321d17e8bf859c8"}, + {file = "hiredis-2.1.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dddd2be67de25a62b3bf871f091181c13da3b32186d4be6af49dadbf6fdc266d"}, + {file = "hiredis-2.1.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:4ee8f6d0774cd6179c625688201e961a2d03da212230adaa2193cfb7a04f9169"}, + {file = "hiredis-2.1.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:5000942ffb6b6410ccbc87089c15fde5f48bd205664ee8b3067e6b2fb5689485"}, + {file = "hiredis-2.1.0-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:21e0017b8f50abd13b4c4c4218c7dfd5a42623e3255b460dfa5f70b45c4e7c3e"}, + {file = "hiredis-2.1.0-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:40b55fb46fcc78b04190176c0ae28bfa3cc7f418fca9df06c037028af5942b6a"}, + {file = "hiredis-2.1.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:24a55169a7f0bd9458935ac644bf8191f127c8aa50cdd70c0b87928cc515cae5"}, + {file = "hiredis-2.1.0-cp311-cp311-win32.whl", hash = "sha256:bb60f79e8c1eb5971b10fd256764ea0c89c4ad2d55ac4379981f678f349411f2"}, + {file = "hiredis-2.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:b223668844f26034759a6c24a72f0bb8e4fb64a43b27e2f3e8378639eaac1661"}, + {file = "hiredis-2.1.0-cp37-cp37m-macosx_10_12_x86_64.whl", hash = "sha256:7f7e7d91d6533fcb1939d467cf8bfb98640edf715897959f31ae83f5ad29aed3"}, + {file = "hiredis-2.1.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:531d1d3955244831b69272b993e16f93489ce2dadfdf800ac856dc2d9a43d353"}, + {file = "hiredis-2.1.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:66ffcbfc4db52dd87cdfd53bda45881ab3ab07c80ec43244fd8d70ee69d42c01"}, + {file = "hiredis-2.1.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:023b3b3ac410d6cfdb45ee943b8c528c90379f31419a1fd229888aa2b965732d"}, + {file = "hiredis-2.1.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c972385a0647120d4b0fe0e9567257cad7b2577b9f1315815713c571af0e778d"}, + {file = "hiredis-2.1.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:32893825426e73d57b3290b68110dd76229945e6c79b08a37795f536501935c4"}, + {file = "hiredis-2.1.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:262148f9b616c0cdd0f2c6bda45cd0f1ce6ce2d1974efd296b85b44e5c7567c2"}, + {file = "hiredis-2.1.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:9d601c27b9599fe52cade3096351f92f665e527d29af8d3e29353a76bfcf5615"}, + {file = "hiredis-2.1.0-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:d248acc7d7713c1b3d48ed8ea67d6ba43b104aa67d63078846a3590adbab6b73"}, + {file = "hiredis-2.1.0-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:969ffe37a8980a6e5404993ccfe605a40fa6732fa6d7b26a1a718c9121197002"}, + {file = "hiredis-2.1.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:288d5d0566d3cbcd800e46c7a547428d321842898b8c7de037a7e78b5644e88a"}, + {file = "hiredis-2.1.0-cp37-cp37m-win32.whl", hash = "sha256:06cb776d3cd3cbec86010f1bab6895ee16af8036aae8c3594a5e96c24f0f83a5"}, + {file = "hiredis-2.1.0-cp37-cp37m-win_amd64.whl", hash = "sha256:6766376dc43ef186113422ecacec0ece0d4b12c0e5f4b556669e639b20ccabb1"}, + {file = "hiredis-2.1.0-cp38-cp38-macosx_10_12_universal2.whl", hash = "sha256:41afba30304adcbe1c93fc8272a7169b7fc4e4d3d470ad8babd391678a519d76"}, + {file = "hiredis-2.1.0-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:6df0115f8b0766cd3d12416e2e2e914efed5b1a1a27605c9f37bc92de086877a"}, + {file = "hiredis-2.1.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:5d7d7078f3b841ad86e35459e9f1a49db6d793b796a25fe866333166196d9fec"}, + {file = "hiredis-2.1.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:835c4cbf8b38c83240b3eb9bd575cd1bfefe5ea5c46cc5ac2bf2d1f47d1fd696"}, + {file = "hiredis-2.1.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:718589c48e97820bdc2a99e2621b5039884cc23199213756054d10cd309ad56c"}, + {file = "hiredis-2.1.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b2d96be6917ea8f753691a4674f682dd5e145b70edab28c05aa5552ae873e843"}, + {file = "hiredis-2.1.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b5fe1bb4b1525751f3050337097b3b2bfe445836e59a5a0984928dd0797f9abf"}, + {file = "hiredis-2.1.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:91dc73310b92b4aeccffdcd4a762955fe71380f5eaa4e242ee95019e41519101"}, + {file = "hiredis-2.1.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:bb858218de60a930a164a991fff001c70b0c3d923d3ae40fef2acf3321126b00"}, + {file = "hiredis-2.1.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:53040c3b3488b52f4609775453fc759262f2885b733150ee2e1d88257fdafed8"}, + {file = "hiredis-2.1.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:a1c9b7d6d7bf35e1e2217b2847710154b11d25bf86b77bb7e190161f8b89917e"}, + {file = "hiredis-2.1.0-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:dfbe939fdddbc7b90cab4124f3ddd6391099fb964f6dab3386aa8cf56f37b5ba"}, + {file = "hiredis-2.1.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:3a51cb4ea466276a845a940931357b4a876f903eabde514ba95e45050e1c2150"}, + {file = "hiredis-2.1.0-cp38-cp38-win32.whl", hash = "sha256:8bce4c687136bf13df76072072b9baadbd52f7d1b143fbbda96387f50e8ebaeb"}, + {file = "hiredis-2.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:1f94684b13fbbee1239303018d5ea900d786e486cdb130cde3144d53f4e262e4"}, + {file = "hiredis-2.1.0-cp39-cp39-macosx_10_12_universal2.whl", hash = "sha256:879668ffab582bdffd9f10f6c8797aac055db183f266e3aa3a6438ff0768bc29"}, + {file = 
"hiredis-2.1.0-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:f1d5a99de0fd02438f251e50ec64936d22d542c8e5d80bdec236f9713eeef334"}, + {file = "hiredis-2.1.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ab622bcddcf334b4b1fc4b22e163e93160e3afdd7feaedd77ac6f258e0c77b68"}, + {file = "hiredis-2.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:964c4f23ff450fb8d73edf06fc7475a4e81a3f9b03a9a04a907ec81c84052fcf"}, + {file = "hiredis-2.1.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d9f8b8daef346ffc0268d7086c213ab24c2a3fcbd4249eacfbb3635602c79d20"}, + {file = "hiredis-2.1.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1e2039cdaa2e6656eae4a2e2537ed77e27f29b7487b97ce7ae6a3cb88d01b968"}, + {file = "hiredis-2.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:43d3168da0a81fa0a9e4bc6e14316beac8e5f1b439ca5cc5af7f9a558cfba741"}, + {file = "hiredis-2.1.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0021ba034b74c5006f62e4cfdd79d04c7c720731eda256ce29d769ac6483adc3"}, + {file = "hiredis-2.1.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:39a1bb45bcd698baf70ad4e9a94af164525bf053caea7df3777172d20d69538a"}, + {file = "hiredis-2.1.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:c1b636b05777536a83b4cced157cbdc2d0012d494a9ec2f7b7e07c54296cd773"}, + {file = "hiredis-2.1.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:58a7ceb71f967fcc1878fb64666a12fbc5f243ab00d0653d3752a811941d8261"}, + {file = "hiredis-2.1.0-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:c5263c676dc4d55202e7ca0429b949fc6ba7c0dd3a3a2b80538593ab27d82836"}, + {file = "hiredis-2.1.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:b5879d13025b04903ddf71921812db27fe1156a0952ad253014354d72463aaa9"}, + {file = "hiredis-2.1.0-cp39-cp39-win32.whl", hash = "sha256:9259f637d77544ffeb97acb0a87fdd192a8aced7a2fbd7439160dbee8341d446"}, + {file = "hiredis-2.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:fb818b6e0981e16dfdfc9e507c9842f8d210e6ecaf3edb8ac3039dbd24768839"}, + {file = "hiredis-2.1.0-pp37-pypy37_pp73-macosx_10_12_x86_64.whl", hash = "sha256:648d4648bf6b3dcc418a974df143b2f96627ab8b50bda23a57759c273880ecfb"}, + {file = "hiredis-2.1.0-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:654949cfc0bc76a5292b6ac111113b2eafb0739e0496495368981ea2e80bf4ec"}, + {file = "hiredis-2.1.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9f2a98b835c2088998a47da51b1b3661b587b2d4b3305d03fc9893888cc2aa54"}, + {file = "hiredis-2.1.0-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7222bd9243387d778245619d0ac62d35cf72ee746ec0efb7b9b230ae3e0c3a39"}, + {file = "hiredis-2.1.0-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:778f6de73c3abd67d447a3442f89e7d43a8de1eb5093f416af14dddc1d5c9cb5"}, + {file = "hiredis-2.1.0-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:c4cfb61fe642f30a22789055847004393bc65b5686988c64191e379ea4ccd069"}, + {file = "hiredis-2.1.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:03b6bef7eb50415aca87200a511d66a2fd69f1fcc75cfe1408e1201cbe28ddfb"}, + {file = "hiredis-2.1.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3195e13a700f6ff35894c4920fcce8f6c2b01cdbc01f76fe567753c495849e9b"}, + {file = 
"hiredis-2.1.0-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:19f724405c808a89db422ed1010caab80a16d3e5b49632356ae7912513b6d58e"}, + {file = "hiredis-2.1.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:8ecebeff966b412138b0cd105d7572f8d5e65e96355af699863890f8370707e6"}, + {file = "hiredis-2.1.0-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:4f34eefaf164bf43b29ccc809c168248eb95001837ed0e9e3279891f57ae2fab"}, + {file = "hiredis-2.1.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:11fad16beb9d623ea423c9129bab0e392ea4c84363d61c125f679be3d029442f"}, + {file = "hiredis-2.1.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2c763eb9a1414c4d665945c70ae2ef74a843600667b0069fe90e2aabc78e5411"}, + {file = "hiredis-2.1.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:edb7f156a8f8a1999574f27bda67dd2bff2d5b180bb6aed996a1792cafbcc668"}, + {file = "hiredis-2.1.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:e057d5545189d4c9e22ae0f7dc283ea0a225f56999511022c062cce7f9589d69"}, ] hyperlink = [ {file = "hyperlink-21.0.0-py2.py3-none-any.whl", hash = "sha256:e6b14c37ecb73e89c77d78cdb4c2cc8f3fb59a885c5b3f819ff4ed80f25af1b4"}, @@ -2020,76 +2068,81 @@ ldap3 = [ {file = "ldap3-2.9.1.tar.gz", hash = "sha256:f3e7fc4718e3f09dda568b57100095e0ce58633bcabbed8667ce3f8fbaa4229f"}, ] lxml = [ - {file = "lxml-4.9.1-cp27-cp27m-macosx_10_15_x86_64.whl", hash = "sha256:98cafc618614d72b02185ac583c6f7796202062c41d2eeecdf07820bad3295ed"}, - {file = "lxml-4.9.1-cp27-cp27m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c62e8dd9754b7debda0c5ba59d34509c4688f853588d75b53c3791983faa96fc"}, - {file = "lxml-4.9.1-cp27-cp27m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:21fb3d24ab430fc538a96e9fbb9b150029914805d551deeac7d7822f64631dfc"}, - {file = "lxml-4.9.1-cp27-cp27m-win32.whl", hash = "sha256:86e92728ef3fc842c50a5cb1d5ba2bc66db7da08a7af53fb3da79e202d1b2cd3"}, - {file = "lxml-4.9.1-cp27-cp27m-win_amd64.whl", hash = "sha256:4cfbe42c686f33944e12f45a27d25a492cc0e43e1dc1da5d6a87cbcaf2e95627"}, - {file = "lxml-4.9.1-cp27-cp27mu-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:dad7b164905d3e534883281c050180afcf1e230c3d4a54e8038aa5cfcf312b84"}, - {file = "lxml-4.9.1-cp27-cp27mu-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:a614e4afed58c14254e67862456d212c4dcceebab2eaa44d627c2ca04bf86837"}, - {file = "lxml-4.9.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:f9ced82717c7ec65a67667bb05865ffe38af0e835cdd78728f1209c8fffe0cad"}, - {file = "lxml-4.9.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:d9fc0bf3ff86c17348dfc5d322f627d78273eba545db865c3cd14b3f19e57fa5"}, - {file = "lxml-4.9.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:e5f66bdf0976ec667fc4594d2812a00b07ed14d1b44259d19a41ae3fff99f2b8"}, - {file = "lxml-4.9.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:fe17d10b97fdf58155f858606bddb4e037b805a60ae023c009f760d8361a4eb8"}, - {file = "lxml-4.9.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8caf4d16b31961e964c62194ea3e26a0e9561cdf72eecb1781458b67ec83423d"}, - {file = "lxml-4.9.1-cp310-cp310-win32.whl", hash = "sha256:4780677767dd52b99f0af1f123bc2c22873d30b474aa0e2fc3fe5e02217687c7"}, - {file = 
"lxml-4.9.1-cp310-cp310-win_amd64.whl", hash = "sha256:b122a188cd292c4d2fcd78d04f863b789ef43aa129b233d7c9004de08693728b"}, - {file = "lxml-4.9.1-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:be9eb06489bc975c38706902cbc6888f39e946b81383abc2838d186f0e8b6a9d"}, - {file = "lxml-4.9.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:f1be258c4d3dc609e654a1dc59d37b17d7fef05df912c01fc2e15eb43a9735f3"}, - {file = "lxml-4.9.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:927a9dd016d6033bc12e0bf5dee1dde140235fc8d0d51099353c76081c03dc29"}, - {file = "lxml-4.9.1-cp35-cp35m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:9232b09f5efee6a495a99ae6824881940d6447debe272ea400c02e3b68aad85d"}, - {file = "lxml-4.9.1-cp35-cp35m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:04da965dfebb5dac2619cb90fcf93efdb35b3c6994fea58a157a834f2f94b318"}, - {file = "lxml-4.9.1-cp35-cp35m-win32.whl", hash = "sha256:4d5bae0a37af799207140652a700f21a85946f107a199bcb06720b13a4f1f0b7"}, - {file = "lxml-4.9.1-cp35-cp35m-win_amd64.whl", hash = "sha256:4878e667ebabe9b65e785ac8da4d48886fe81193a84bbe49f12acff8f7a383a4"}, - {file = "lxml-4.9.1-cp36-cp36m-macosx_10_15_x86_64.whl", hash = "sha256:1355755b62c28950f9ce123c7a41460ed9743c699905cbe664a5bcc5c9c7c7fb"}, - {file = "lxml-4.9.1-cp36-cp36m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:bcaa1c495ce623966d9fc8a187da80082334236a2a1c7e141763ffaf7a405067"}, - {file = "lxml-4.9.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6eafc048ea3f1b3c136c71a86db393be36b5b3d9c87b1c25204e7d397cee9536"}, - {file = "lxml-4.9.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:13c90064b224e10c14dcdf8086688d3f0e612db53766e7478d7754703295c7c8"}, - {file = "lxml-4.9.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:206a51077773c6c5d2ce1991327cda719063a47adc02bd703c56a662cdb6c58b"}, - {file = "lxml-4.9.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:e8f0c9d65da595cfe91713bc1222af9ecabd37971762cb830dea2fc3b3bb2acf"}, - {file = "lxml-4.9.1-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:8f0a4d179c9a941eb80c3a63cdb495e539e064f8054230844dcf2fcb812b71d3"}, - {file = "lxml-4.9.1-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:830c88747dce8a3e7525defa68afd742b4580df6aa2fdd6f0855481e3994d391"}, - {file = "lxml-4.9.1-cp36-cp36m-win32.whl", hash = "sha256:1e1cf47774373777936c5aabad489fef7b1c087dcd1f426b621fda9dcc12994e"}, - {file = "lxml-4.9.1-cp36-cp36m-win_amd64.whl", hash = "sha256:5974895115737a74a00b321e339b9c3f45c20275d226398ae79ac008d908bff7"}, - {file = "lxml-4.9.1-cp37-cp37m-macosx_10_15_x86_64.whl", hash = "sha256:1423631e3d51008871299525b541413c9b6c6423593e89f9c4cfbe8460afc0a2"}, - {file = "lxml-4.9.1-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:2aaf6a0a6465d39b5ca69688fce82d20088c1838534982996ec46633dc7ad6cc"}, - {file = "lxml-4.9.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:9f36de4cd0c262dd9927886cc2305aa3f2210db437aa4fed3fb4940b8bf4592c"}, - {file = "lxml-4.9.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:ae06c1e4bc60ee076292e582a7512f304abdf6c70db59b56745cca1684f875a4"}, - {file = "lxml-4.9.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = 
"sha256:57e4d637258703d14171b54203fd6822fda218c6c2658a7d30816b10995f29f3"}, - {file = "lxml-4.9.1-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:6d279033bf614953c3fc4a0aa9ac33a21e8044ca72d4fa8b9273fe75359d5cca"}, - {file = "lxml-4.9.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:a60f90bba4c37962cbf210f0188ecca87daafdf60271f4c6948606e4dabf8785"}, - {file = "lxml-4.9.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:6ca2264f341dd81e41f3fffecec6e446aa2121e0b8d026fb5130e02de1402785"}, - {file = "lxml-4.9.1-cp37-cp37m-win32.whl", hash = "sha256:27e590352c76156f50f538dbcebd1925317a0f70540f7dc8c97d2931c595783a"}, - {file = "lxml-4.9.1-cp37-cp37m-win_amd64.whl", hash = "sha256:eea5d6443b093e1545ad0210e6cf27f920482bfcf5c77cdc8596aec73523bb7e"}, - {file = "lxml-4.9.1-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:f05251bbc2145349b8d0b77c0d4e5f3b228418807b1ee27cefb11f69ed3d233b"}, - {file = "lxml-4.9.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:487c8e61d7acc50b8be82bda8c8d21d20e133c3cbf41bd8ad7eb1aaeb3f07c97"}, - {file = "lxml-4.9.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:8d1a92d8e90b286d491e5626af53afef2ba04da33e82e30744795c71880eaa21"}, - {file = "lxml-4.9.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:b570da8cd0012f4af9fa76a5635cd31f707473e65a5a335b186069d5c7121ff2"}, - {file = "lxml-4.9.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:5ef87fca280fb15342726bd5f980f6faf8b84a5287fcc2d4962ea8af88b35130"}, - {file = "lxml-4.9.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:93e414e3206779ef41e5ff2448067213febf260ba747fc65389a3ddaa3fb8715"}, - {file = "lxml-4.9.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6653071f4f9bac46fbc30f3c7838b0e9063ee335908c5d61fb7a4a86c8fd2036"}, - {file = "lxml-4.9.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:32a73c53783becdb7eaf75a2a1525ea8e49379fb7248c3eeefb9412123536387"}, - {file = "lxml-4.9.1-cp38-cp38-win32.whl", hash = "sha256:1a7c59c6ffd6ef5db362b798f350e24ab2cfa5700d53ac6681918f314a4d3b94"}, - {file = "lxml-4.9.1-cp38-cp38-win_amd64.whl", hash = "sha256:1436cf0063bba7888e43f1ba8d58824f085410ea2025befe81150aceb123e345"}, - {file = "lxml-4.9.1-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:4beea0f31491bc086991b97517b9683e5cfb369205dac0148ef685ac12a20a67"}, - {file = "lxml-4.9.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:41fb58868b816c202e8881fd0f179a4644ce6e7cbbb248ef0283a34b73ec73bb"}, - {file = "lxml-4.9.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:bd34f6d1810d9354dc7e35158aa6cc33456be7706df4420819af6ed966e85448"}, - {file = "lxml-4.9.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:edffbe3c510d8f4bf8640e02ca019e48a9b72357318383ca60e3330c23aaffc7"}, - {file = "lxml-4.9.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6d949f53ad4fc7cf02c44d6678e7ff05ec5f5552b235b9e136bd52e9bf730b91"}, - {file = "lxml-4.9.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:079b68f197c796e42aa80b1f739f058dcee796dc725cc9a1be0cdb08fc45b000"}, - {file = "lxml-4.9.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9c3a88d20e4fe4a2a4a84bf439a5ac9c9aba400b85244c63a1ab7088f85d9d25"}, - {file = "lxml-4.9.1-cp39-cp39-musllinux_1_1_x86_64.whl", 
hash = "sha256:4e285b5f2bf321fc0857b491b5028c5f276ec0c873b985d58d7748ece1d770dd"}, - {file = "lxml-4.9.1-cp39-cp39-win32.whl", hash = "sha256:ef72013e20dd5ba86a8ae1aed7f56f31d3374189aa8b433e7b12ad182c0d2dfb"}, - {file = "lxml-4.9.1-cp39-cp39-win_amd64.whl", hash = "sha256:10d2017f9150248563bb579cd0d07c61c58da85c922b780060dcc9a3aa9f432d"}, - {file = "lxml-4.9.1-pp37-pypy37_pp73-macosx_10_15_x86_64.whl", hash = "sha256:0538747a9d7827ce3e16a8fdd201a99e661c7dee3c96c885d8ecba3c35d1032c"}, - {file = "lxml-4.9.1-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:0645e934e940107e2fdbe7c5b6fb8ec6232444260752598bc4d09511bd056c0b"}, - {file = "lxml-4.9.1-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:6daa662aba22ef3258934105be2dd9afa5bb45748f4f702a3b39a5bf53a1f4dc"}, - {file = "lxml-4.9.1-pp38-pypy38_pp73-macosx_10_15_x86_64.whl", hash = "sha256:603a464c2e67d8a546ddaa206d98e3246e5db05594b97db844c2f0a1af37cf5b"}, - {file = "lxml-4.9.1-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:c4b2e0559b68455c085fb0f6178e9752c4be3bba104d6e881eb5573b399d1eb2"}, - {file = "lxml-4.9.1-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:0f3f0059891d3254c7b5fb935330d6db38d6519ecd238ca4fce93c234b4a0f73"}, - {file = "lxml-4.9.1-pp39-pypy39_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:c852b1530083a620cb0de5f3cd6826f19862bafeaf77586f1aef326e49d95f0c"}, - {file = "lxml-4.9.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:287605bede6bd36e930577c5925fcea17cb30453d96a7b4c63c14a257118dbb9"}, - {file = "lxml-4.9.1.tar.gz", hash = "sha256:fe749b052bb7233fe5d072fcb549221a8cb1a16725c47c37e42b0b9cb3ff2c3f"}, + {file = "lxml-4.9.2-cp27-cp27m-macosx_10_15_x86_64.whl", hash = "sha256:76cf573e5a365e790396a5cc2b909812633409306c6531a6877c59061e42c4f2"}, + {file = "lxml-4.9.2-cp27-cp27m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b1f42b6921d0e81b1bcb5e395bc091a70f41c4d4e55ba99c6da2b31626c44892"}, + {file = "lxml-4.9.2-cp27-cp27m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:9f102706d0ca011de571de32c3247c6476b55bb6bc65a20f682f000b07a4852a"}, + {file = "lxml-4.9.2-cp27-cp27m-win32.whl", hash = "sha256:8d0b4612b66ff5d62d03bcaa043bb018f74dfea51184e53f067e6fdcba4bd8de"}, + {file = "lxml-4.9.2-cp27-cp27m-win_amd64.whl", hash = "sha256:4c8f293f14abc8fd3e8e01c5bd86e6ed0b6ef71936ded5bf10fe7a5efefbaca3"}, + {file = "lxml-4.9.2-cp27-cp27mu-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2899456259589aa38bfb018c364d6ae7b53c5c22d8e27d0ec7609c2a1ff78b50"}, + {file = "lxml-4.9.2-cp27-cp27mu-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:6749649eecd6a9871cae297bffa4ee76f90b4504a2a2ab528d9ebe912b101975"}, + {file = "lxml-4.9.2-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:a08cff61517ee26cb56f1e949cca38caabe9ea9fbb4b1e10a805dc39844b7d5c"}, + {file = "lxml-4.9.2-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:85cabf64adec449132e55616e7ca3e1000ab449d1d0f9d7f83146ed5bdcb6d8a"}, + {file = "lxml-4.9.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:8340225bd5e7a701c0fa98284c849c9b9fc9238abf53a0ebd90900f25d39a4e4"}, + {file = "lxml-4.9.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", 
hash = "sha256:1ab8f1f932e8f82355e75dda5413a57612c6ea448069d4fb2e217e9a4bed13d4"}, + {file = "lxml-4.9.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:699a9af7dffaf67deeae27b2112aa06b41c370d5e7633e0ee0aea2e0b6c211f7"}, + {file = "lxml-4.9.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b9cc34af337a97d470040f99ba4282f6e6bac88407d021688a5d585e44a23184"}, + {file = "lxml-4.9.2-cp310-cp310-win32.whl", hash = "sha256:d02a5399126a53492415d4906ab0ad0375a5456cc05c3fc0fc4ca11771745cda"}, + {file = "lxml-4.9.2-cp310-cp310-win_amd64.whl", hash = "sha256:a38486985ca49cfa574a507e7a2215c0c780fd1778bb6290c21193b7211702ab"}, + {file = "lxml-4.9.2-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:c83203addf554215463b59f6399835201999b5e48019dc17f182ed5ad87205c9"}, + {file = "lxml-4.9.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:2a87fa548561d2f4643c99cd13131acb607ddabb70682dcf1dff5f71f781a4bf"}, + {file = "lxml-4.9.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:d6b430a9938a5a5d85fc107d852262ddcd48602c120e3dbb02137c83d212b380"}, + {file = "lxml-4.9.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:3efea981d956a6f7173b4659849f55081867cf897e719f57383698af6f618a92"}, + {file = "lxml-4.9.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:df0623dcf9668ad0445e0558a21211d4e9a149ea8f5666917c8eeec515f0a6d1"}, + {file = "lxml-4.9.2-cp311-cp311-win32.whl", hash = "sha256:da248f93f0418a9e9d94b0080d7ebc407a9a5e6d0b57bb30db9b5cc28de1ad33"}, + {file = "lxml-4.9.2-cp311-cp311-win_amd64.whl", hash = "sha256:3818b8e2c4b5148567e1b09ce739006acfaa44ce3156f8cbbc11062994b8e8dd"}, + {file = "lxml-4.9.2-cp35-cp35m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ca989b91cf3a3ba28930a9fc1e9aeafc2a395448641df1f387a2d394638943b0"}, + {file = "lxml-4.9.2-cp35-cp35m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:822068f85e12a6e292803e112ab876bc03ed1f03dddb80154c395f891ca6b31e"}, + {file = "lxml-4.9.2-cp35-cp35m-win32.whl", hash = "sha256:be7292c55101e22f2a3d4d8913944cbea71eea90792bf914add27454a13905df"}, + {file = "lxml-4.9.2-cp36-cp36m-macosx_10_15_x86_64.whl", hash = "sha256:b26a29f0b7fc6f0897f043ca366142d2b609dc60756ee6e4e90b5f762c6adc53"}, + {file = "lxml-4.9.2-cp36-cp36m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:ab323679b8b3030000f2be63e22cdeea5b47ee0abd2d6a1dc0c8103ddaa56cd7"}, + {file = "lxml-4.9.2-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:689bb688a1db722485e4610a503e3e9210dcc20c520b45ac8f7533c837be76fe"}, + {file = "lxml-4.9.2-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:f49e52d174375a7def9915c9f06ec4e569d235ad428f70751765f48d5926678c"}, + {file = "lxml-4.9.2-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:36c3c175d34652a35475a73762b545f4527aec044910a651d2bf50de9c3352b1"}, + {file = "lxml-4.9.2-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:a35f8b7fa99f90dd2f5dc5a9fa12332642f087a7641289ca6c40d6e1a2637d8e"}, + {file = "lxml-4.9.2-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:58bfa3aa19ca4c0f28c5dde0ff56c520fbac6f0daf4fac66ed4c8d2fb7f22e74"}, + {file = "lxml-4.9.2-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:bc718cd47b765e790eecb74d044cc8d37d58562f6c314ee9484df26276d36a38"}, + {file = "lxml-4.9.2-cp36-cp36m-win32.whl", hash = 
"sha256:d5bf6545cd27aaa8a13033ce56354ed9e25ab0e4ac3b5392b763d8d04b08e0c5"}, + {file = "lxml-4.9.2-cp37-cp37m-macosx_10_15_x86_64.whl", hash = "sha256:05ca3f6abf5cf78fe053da9b1166e062ade3fa5d4f92b4ed688127ea7d7b1d03"}, + {file = "lxml-4.9.2-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:a5da296eb617d18e497bcf0a5c528f5d3b18dadb3619fbdadf4ed2356ef8d941"}, + {file = "lxml-4.9.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:04876580c050a8c5341d706dd464ff04fd597095cc8c023252566a8826505726"}, + {file = "lxml-4.9.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:c9ec3eaf616d67db0764b3bb983962b4f385a1f08304fd30c7283954e6a7869b"}, + {file = "lxml-4.9.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2a29ba94d065945944016b6b74e538bdb1751a1db6ffb80c9d3c2e40d6fa9894"}, + {file = "lxml-4.9.2-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:a82d05da00a58b8e4c0008edbc8a4b6ec5a4bc1e2ee0fb6ed157cf634ed7fa45"}, + {file = "lxml-4.9.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:223f4232855ade399bd409331e6ca70fb5578efef22cf4069a6090acc0f53c0e"}, + {file = "lxml-4.9.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:d17bc7c2ccf49c478c5bdd447594e82692c74222698cfc9b5daae7ae7e90743b"}, + {file = "lxml-4.9.2-cp37-cp37m-win32.whl", hash = "sha256:b64d891da92e232c36976c80ed7ebb383e3f148489796d8d31a5b6a677825efe"}, + {file = "lxml-4.9.2-cp37-cp37m-win_amd64.whl", hash = "sha256:a0a336d6d3e8b234a3aae3c674873d8f0e720b76bc1d9416866c41cd9500ffb9"}, + {file = "lxml-4.9.2-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:da4dd7c9c50c059aba52b3524f84d7de956f7fef88f0bafcf4ad7dde94a064e8"}, + {file = "lxml-4.9.2-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:821b7f59b99551c69c85a6039c65b75f5683bdc63270fec660f75da67469ca24"}, + {file = "lxml-4.9.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:e5168986b90a8d1f2f9dc1b841467c74221bd752537b99761a93d2d981e04889"}, + {file = "lxml-4.9.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:8e20cb5a47247e383cf4ff523205060991021233ebd6f924bca927fcf25cf86f"}, + {file = "lxml-4.9.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:13598ecfbd2e86ea7ae45ec28a2a54fb87ee9b9fdb0f6d343297d8e548392c03"}, + {file = "lxml-4.9.2-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:880bbbcbe2fca64e2f4d8e04db47bcdf504936fa2b33933efd945e1b429bea8c"}, + {file = "lxml-4.9.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:7d2278d59425777cfcb19735018d897ca8303abe67cc735f9f97177ceff8027f"}, + {file = "lxml-4.9.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:5344a43228767f53a9df6e5b253f8cdca7dfc7b7aeae52551958192f56d98457"}, + {file = "lxml-4.9.2-cp38-cp38-win32.whl", hash = "sha256:925073b2fe14ab9b87e73f9a5fde6ce6392da430f3004d8b72cc86f746f5163b"}, + {file = "lxml-4.9.2-cp38-cp38-win_amd64.whl", hash = "sha256:9b22c5c66f67ae00c0199f6055705bc3eb3fcb08d03d2ec4059a2b1b25ed48d7"}, + {file = "lxml-4.9.2-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:5f50a1c177e2fa3ee0667a5ab79fdc6b23086bc8b589d90b93b4bd17eb0e64d1"}, + {file = "lxml-4.9.2-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:090c6543d3696cbe15b4ac6e175e576bcc3f1ccfbba970061b7300b0c15a2140"}, + {file = 
"lxml-4.9.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:63da2ccc0857c311d764e7d3d90f429c252e83b52d1f8f1d1fe55be26827d1f4"}, + {file = "lxml-4.9.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:5b4545b8a40478183ac06c073e81a5ce4cf01bf1734962577cf2bb569a5b3bbf"}, + {file = "lxml-4.9.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2e430cd2824f05f2d4f687701144556646bae8f249fd60aa1e4c768ba7018947"}, + {file = "lxml-4.9.2-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:6804daeb7ef69e7b36f76caddb85cccd63d0c56dedb47555d2fc969e2af6a1a5"}, + {file = "lxml-4.9.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:a6e441a86553c310258aca15d1c05903aaf4965b23f3bc2d55f200804e005ee5"}, + {file = "lxml-4.9.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ca34efc80a29351897e18888c71c6aca4a359247c87e0b1c7ada14f0ab0c0fb2"}, + {file = "lxml-4.9.2-cp39-cp39-win32.whl", hash = "sha256:6b418afe5df18233fc6b6093deb82a32895b6bb0b1155c2cdb05203f583053f1"}, + {file = "lxml-4.9.2-cp39-cp39-win_amd64.whl", hash = "sha256:f1496ea22ca2c830cbcbd473de8f114a320da308438ae65abad6bab7867fe38f"}, + {file = "lxml-4.9.2-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:b264171e3143d842ded311b7dccd46ff9ef34247129ff5bf5066123c55c2431c"}, + {file = "lxml-4.9.2-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:0dc313ef231edf866912e9d8f5a042ddab56c752619e92dfd3a2c277e6a7299a"}, + {file = "lxml-4.9.2-pp38-pypy38_pp73-macosx_10_15_x86_64.whl", hash = "sha256:16efd54337136e8cd72fb9485c368d91d77a47ee2d42b057564aae201257d419"}, + {file = "lxml-4.9.2-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:0f2b1e0d79180f344ff9f321327b005ca043a50ece8713de61d1cb383fb8ac05"}, + {file = "lxml-4.9.2-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:7b770ed79542ed52c519119473898198761d78beb24b107acf3ad65deae61f1f"}, + {file = "lxml-4.9.2-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:efa29c2fe6b4fdd32e8ef81c1528506895eca86e1d8c4657fda04c9b3786ddf9"}, + {file = "lxml-4.9.2-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:7e91ee82f4199af8c43d8158024cbdff3d931df350252288f0d4ce656df7f3b5"}, + {file = "lxml-4.9.2-pp39-pypy39_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:b23e19989c355ca854276178a0463951a653309fb8e57ce674497f2d9f208746"}, + {file = "lxml-4.9.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:01d36c05f4afb8f7c20fd9ed5badca32a2029b93b1750f571ccc0b142531caf7"}, + {file = "lxml-4.9.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7b515674acfdcadb0eb5d00d8a709868173acece5cb0be3dd165950cbfdf5409"}, + {file = "lxml-4.9.2.tar.gz", hash = "sha256:2455cfaeb7ac70338b3257f41e21f0724f4b5b0c0e7702da67ee6c3640835b67"}, ] markupsafe = [ {file = "MarkupSafe-2.1.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:3028252424c72b2602a323f70fbf50aa80a5d3aa616ea6add4ba21ae9cc9da4c"}, @@ -2563,8 +2616,8 @@ semantic-version = [ {file = "semantic_version-2.10.0.tar.gz", hash = "sha256:bdabb6d336998cbb378d4b9db3a4b56a1e3235701dc05ea2690d9a997ed5041c"}, ] sentry-sdk = [ - {file = "sentry-sdk-1.11.1.tar.gz", hash = "sha256:675f6279b6bb1fea09fd61751061f9a90dca3b5929ef631dd50dc8b3aeb245e9"}, - {file = 
"sentry_sdk-1.11.1-py2.py3-none-any.whl", hash = "sha256:8b4ff696c0bdcceb3f70bbb87a57ba84fd3168b1332d493fcd16c137f709578c"}, + {file = "sentry-sdk-1.12.0.tar.gz", hash = "sha256:dc0fe6ef2f77a3853b399c75c97d87be7666098817c1c314f8fcdf68a6865914"}, + {file = "sentry_sdk-1.12.0-py2.py3-none-any.whl", hash = "sha256:3c9bc64025976842c1103cd75d45cff94a7c0cc48fa07770d07a09d2ab8dac30"}, ] service-identity = [ {file = "service-identity-21.1.0.tar.gz", hash = "sha256:6e6c6086ca271dc11b033d17c3a8bea9f24ebff920c587da090afc9519419d34"}, @@ -2795,8 +2848,8 @@ types-ipaddress = [ {file = "types_ipaddress-1.0.8-py3-none-any.whl", hash = "sha256:4933b74da157ba877b1a705d64f6fa7742745e9ffd65e51011f370c11ebedb55"}, ] types-jsonschema = [ - {file = "types-jsonschema-4.17.0.1.tar.gz", hash = "sha256:62625d492e4930411a431909ac32301aeab6180500e70ee222f81d43204cfb3c"}, - {file = "types_jsonschema-4.17.0.1-py3-none-any.whl", hash = "sha256:77badbe3881cbf79ac9561be2be2b1f37ab104b13afd2231840e6dd6e94e63c2"}, + {file = "types-jsonschema-4.17.0.2.tar.gz", hash = "sha256:8b9e1140d4d780f0f19b5cab1b8a3732e8dd5e49dbc1f174cc0b499125ca6f6c"}, + {file = "types_jsonschema-4.17.0.2-py3-none-any.whl", hash = "sha256:8fd2f9aea4da54f9a811baa6963aac10fd680c18baa6237392c079b97d152738"}, ] types-opentracing = [ {file = "types-opentracing-2.4.10.tar.gz", hash = "sha256:6101414f3b6d3b9c10f1c510a261e8439b6c8d67c723d5c2872084697b4580a7"}, @@ -2823,8 +2876,8 @@ types-requests = [ {file = "types_requests-2.28.11.5-py3-none-any.whl", hash = "sha256:091d4a5a33c1b4f20d8b1b952aa8fa27a6e767c44c3cf65e56580df0b05fd8a9"}, ] types-setuptools = [ - {file = "types-setuptools-65.6.0.1.tar.gz", hash = "sha256:a03cf72f336929c9405f485dd90baef31a401776675f785f69a5a519f0b099ca"}, - {file = "types_setuptools-65.6.0.1-py3-none-any.whl", hash = "sha256:c957599502195ab98e90f0560466fa963f6a23373905e6d4e1772dbfaf1e44b7"}, + {file = "types-setuptools-65.6.0.2.tar.gz", hash = "sha256:ad60ccf01d626de9762224448f36c13e0660e863afd6dc11d979b3739a6c7d24"}, + {file = "types_setuptools-65.6.0.2-py3-none-any.whl", hash = "sha256:2c2b4f756f79778074ce2d21f745aa737b12160d9f8dfa274f47a7287c7a2fee"}, ] types-urllib3 = [ {file = "types-urllib3-1.26.10.tar.gz", hash = "sha256:a26898f530e6c3f43f25b907f2b884486868ffd56a9faa94cbf9b3eb6e165d6a"}, diff --git a/synapse/config/cache.py b/synapse/config/cache.py index eb4194a5a9..015b2a138e 100644 --- a/synapse/config/cache.py +++ b/synapse/config/cache.py @@ -16,7 +16,7 @@ import logging import os import re import threading -from typing import Any, Callable, Dict, Optional +from typing import Any, Callable, Dict, Mapping, Optional import attr @@ -94,7 +94,7 @@ def add_resizable_cache( class CacheConfig(Config): section = "caches" - _environ = os.environ + _environ: Mapping[str, str] = os.environ event_cache_size: int cache_factors: Dict[str, float] diff --git a/synapse/handlers/auth.py b/synapse/handlers/auth.py index 8b9ef25d29..30f2d46c3c 100644 --- a/synapse/handlers/auth.py +++ b/synapse/handlers/auth.py @@ -2031,7 +2031,7 @@ class PasswordAuthProvider: self.is_3pid_allowed_callbacks: List[IS_3PID_ALLOWED_CALLBACK] = [] # Mapping from login type to login parameters - self._supported_login_types: Dict[str, Iterable[str]] = {} + self._supported_login_types: Dict[str, Tuple[str, ...]] = {} # Mapping from login type to auth checker callbacks self.auth_checker_callbacks: Dict[str, List[CHECK_AUTH_CALLBACK]] = {} diff --git a/synapse/handlers/federation.py b/synapse/handlers/federation.py index b2784d7333..eca75f1108 100644 --- 
a/synapse/handlers/federation.py +++ b/synapse/handlers/federation.py @@ -1343,32 +1343,53 @@ class FederationHandler: ) EventValidator().validate_builder(builder) - event, context = await self.event_creation_handler.create_new_client_event( - builder=builder - ) - event, context = await self.add_display_name_to_third_party_invite( - room_version_obj, event_dict, event, context - ) + # Try several times, it could fail with PartialStateConflictError + # in send_membership_event, cf comment in except block. + max_retries = 5 + for i in range(max_retries): + try: + ( + event, + context, + ) = await self.event_creation_handler.create_new_client_event( + builder=builder + ) - EventValidator().validate_new(event, self.config) + event, context = await self.add_display_name_to_third_party_invite( + room_version_obj, event_dict, event, context + ) - # We need to tell the transaction queue to send this out, even - # though the sender isn't a local user. - event.internal_metadata.send_on_behalf_of = self.hs.hostname + EventValidator().validate_new(event, self.config) - try: - validate_event_for_room_version(event) - await self._event_auth_handler.check_auth_rules_from_context(event) - except AuthError as e: - logger.warning("Denying new third party invite %r because %s", event, e) - raise e + # We need to tell the transaction queue to send this out, even + # though the sender isn't a local user. + event.internal_metadata.send_on_behalf_of = self.hs.hostname - await self._check_signature(event, context) + try: + validate_event_for_room_version(event) + await self._event_auth_handler.check_auth_rules_from_context( + event + ) + except AuthError as e: + logger.warning( + "Denying new third party invite %r because %s", event, e + ) + raise e - # We retrieve the room member handler here as to not cause a cyclic dependency - member_handler = self.hs.get_room_member_handler() - await member_handler.send_membership_event(None, event, context) + await self._check_signature(event, context) + + # We retrieve the room member handler here as to not cause a cyclic dependency + member_handler = self.hs.get_room_member_handler() + await member_handler.send_membership_event(None, event, context) + + break + except PartialStateConflictError as e: + # Persisting couldn't happen because the room got un-partial stated + # in the meantime and context needs to be recomputed, so let's do so. + if i == max_retries - 1: + raise e + pass else: destinations = {x.split(":", 1)[-1] for x in (sender_user_id, room_id)} @@ -1400,28 +1421,46 @@ class FederationHandler: room_version_obj, event_dict ) - event, context = await self.event_creation_handler.create_new_client_event( - builder=builder - ) - event, context = await self.add_display_name_to_third_party_invite( - room_version_obj, event_dict, event, context - ) + # Try several times, it could fail with PartialStateConflictError + # in send_membership_event, cf comment in except block. 
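This retry-on-PartialStateConflictError loop is the pattern the patch applies at every event-creation call site (it appears again just below, and in message.py, room.py and room_member.py). Distilled, with a hypothetical `create_and_persist` coroutine standing in for the create/validate/send steps:

    class PartialStateConflictError(Exception):
        """Stand-in for synapse.storage.databases.main.events.PartialStateConflictError."""

    async def send_with_retries(create_and_persist, max_retries: int = 5) -> None:
        for i in range(max_retries):
            try:
                await create_and_persist()  # may raise PartialStateConflictError
                break
            except PartialStateConflictError:
                # The room was un-partial-stated concurrently, so the event
                # context computed inside the attempt is stale; retrying
                # rebuilds it from scratch.
                if i == max_retries - 1:
                    raise

Note that on the final attempt the error is re-raised rather than swallowed, so a persistent conflict still surfaces to the caller.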
+ max_retries = 5 + for i in range(max_retries): + try: + ( + event, + context, + ) = await self.event_creation_handler.create_new_client_event( + builder=builder + ) + event, context = await self.add_display_name_to_third_party_invite( + room_version_obj, event_dict, event, context + ) - try: - validate_event_for_room_version(event) - await self._event_auth_handler.check_auth_rules_from_context(event) - except AuthError as e: - logger.warning("Denying third party invite %r because %s", event, e) - raise e - await self._check_signature(event, context) + try: + validate_event_for_room_version(event) + await self._event_auth_handler.check_auth_rules_from_context(event) + except AuthError as e: + logger.warning("Denying third party invite %r because %s", event, e) + raise e + await self._check_signature(event, context) + + # We need to tell the transaction queue to send this out, even + # though the sender isn't a local user. + event.internal_metadata.send_on_behalf_of = get_domain_from_id( + event.sender + ) - # We need to tell the transaction queue to send this out, even - # though the sender isn't a local user. - event.internal_metadata.send_on_behalf_of = get_domain_from_id(event.sender) + # We retrieve the room member handler here as to not cause a cyclic dependency + member_handler = self.hs.get_room_member_handler() + await member_handler.send_membership_event(None, event, context) - # We retrieve the room member handler here as to not cause a cyclic dependency - member_handler = self.hs.get_room_member_handler() - await member_handler.send_membership_event(None, event, context) + break + except PartialStateConflictError as e: + # Persisting couldn't happen because the room got un-partial stated + # in the meantime and context needs to be recomputed, so let's do so. + if i == max_retries - 1: + raise e + pass async def add_display_name_to_third_party_invite( self, diff --git a/synapse/handlers/federation_event.py b/synapse/handlers/federation_event.py index 66aca2f864..31df7f55cc 100644 --- a/synapse/handlers/federation_event.py +++ b/synapse/handlers/federation_event.py @@ -610,6 +610,8 @@ class FederationEventHandler: self._state_storage_controller.notify_event_un_partial_stated( event.event_id ) + # Notify that there's a new row in the un_partial_stated_events stream. + self._notifier.notify_replication() @trace async def backfill( diff --git a/synapse/handlers/message.py b/synapse/handlers/message.py index 845f683358..88fc51a4c9 100644 --- a/synapse/handlers/message.py +++ b/synapse/handlers/message.py @@ -37,7 +37,6 @@ from synapse.api.errors import ( AuthError, Codes, ConsentNotGivenError, - LimitExceededError, NotFoundError, ShadowBanError, SynapseError, @@ -999,60 +998,73 @@ class EventCreationHandler: event.internal_metadata.stream_ordering, ) - event, context = await self.create_event( - requester, - event_dict, - txn_id=txn_id, - allow_no_prev_events=allow_no_prev_events, - prev_event_ids=prev_event_ids, - state_event_ids=state_event_ids, - outlier=outlier, - historical=historical, - depth=depth, - ) + # Try several times, it could fail with PartialStateConflictError + # in handle_new_client_event, cf comment in except block. 
+ max_retries = 5 + for i in range(max_retries): + try: + event, context = await self.create_event( + requester, + event_dict, + txn_id=txn_id, + allow_no_prev_events=allow_no_prev_events, + prev_event_ids=prev_event_ids, + state_event_ids=state_event_ids, + outlier=outlier, + historical=historical, + depth=depth, + ) - assert self.hs.is_mine_id(event.sender), "User must be our own: %s" % ( - event.sender, - ) + assert self.hs.is_mine_id(event.sender), "User must be our own: %s" % ( + event.sender, + ) - spam_check_result = await self.spam_checker.check_event_for_spam(event) - if spam_check_result != self.spam_checker.NOT_SPAM: - if isinstance(spam_check_result, tuple): - try: - [code, dict] = spam_check_result - raise SynapseError( - 403, - "This message had been rejected as probable spam", - code, - dict, - ) - except ValueError: - logger.error( - "Spam-check module returned invalid error value. Expecting [code, dict], got %s", - spam_check_result, - ) + spam_check_result = await self.spam_checker.check_event_for_spam(event) + if spam_check_result != self.spam_checker.NOT_SPAM: + if isinstance(spam_check_result, tuple): + try: + [code, dict] = spam_check_result + raise SynapseError( + 403, + "This message had been rejected as probable spam", + code, + dict, + ) + except ValueError: + logger.error( + "Spam-check module returned invalid error value. Expecting [code, dict], got %s", + spam_check_result, + ) - raise SynapseError( - 403, - "This message has been rejected as probable spam", - Codes.FORBIDDEN, - ) + raise SynapseError( + 403, + "This message has been rejected as probable spam", + Codes.FORBIDDEN, + ) - # Backwards compatibility: if the return value is not an error code, it - # means the module returned an error message to be included in the - # SynapseError (which is now deprecated). - raise SynapseError( - 403, - spam_check_result, - Codes.FORBIDDEN, + # Backwards compatibility: if the return value is not an error code, it + # means the module returned an error message to be included in the + # SynapseError (which is now deprecated). + raise SynapseError( + 403, + spam_check_result, + Codes.FORBIDDEN, + ) + + ev = await self.handle_new_client_event( + requester=requester, + events_and_context=[(event, context)], + ratelimit=ratelimit, + ignore_shadow_ban=ignore_shadow_ban, ) - ev = await self.handle_new_client_event( - requester=requester, - events_and_context=[(event, context)], - ratelimit=ratelimit, - ignore_shadow_ban=ignore_shadow_ban, - ) + break + except PartialStateConflictError as e: + # Persisting couldn't happen because the room got un-partial stated + # in the meantime and context needs to be recomputed, so let's do so. + if i == max_retries - 1: + raise e + pass # we know it was persisted, so must have a stream ordering assert ev.internal_metadata.stream_ordering @@ -1356,7 +1368,7 @@ class EventCreationHandler: Raises: ShadowBanError if the requester has been shadow-banned. - SynapseError(503) if attempting to persist a partial state event in + PartialStateConflictError if attempting to persist a partial state event in a room that has been un-partial stated. """ extra_users = extra_users or [] @@ -1418,34 +1430,23 @@ class EventCreationHandler: # We now persist the event (and update the cache in parallel, since we # don't want to block on it). 
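Persistence and the joined-hosts cache warm-up deliberately run concurrently: only the persist result is returned, and a cache failure is logged rather than propagated. The Twisted shape of this, with `persist` and `warm_cache` as hypothetical stand-ins for `self._persist_events` and `self.cache_joined_hosts_for_events` (not runnable outside Synapse, since gather_results, run_in_background and friends come from synapse.util):

    result, _ = await make_deferred_yieldable(
        gather_results(
            (
                run_in_background(persist),  # the result callers care about
                run_in_background(warm_cache).addErrback(
                    log_failure, "warm_cache failed"  # log, don't propagate
                ),
            ),
            consumeErrors=True,  # wrap individual failures into a FirstError
        )
    ).addErrback(unwrapFirstError)  # unwrap FirstError back to the real exception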
event, context = events_and_context[0] - try: - result, _ = await make_deferred_yieldable( - gather_results( - ( - run_in_background( - self._persist_events, - requester=requester, - events_and_context=events_and_context, - ratelimit=ratelimit, - extra_users=extra_users, - ), - run_in_background( - self.cache_joined_hosts_for_events, events_and_context - ).addErrback( - log_failure, "cache_joined_hosts_for_event failed" - ), + result, _ = await make_deferred_yieldable( + gather_results( + ( + run_in_background( + self._persist_events, + requester=requester, + events_and_context=events_and_context, + ratelimit=ratelimit, + extra_users=extra_users, ), - consumeErrors=True, - ) - ).addErrback(unwrapFirstError) - except PartialStateConflictError as e: - # The event context needs to be recomputed. - # Turn the error into a 429, as a hint to the client to try again. - logger.info( - "Room %s was un-partial stated while persisting client event.", - event.room_id, + run_in_background( + self.cache_joined_hosts_for_events, events_and_context + ).addErrback(log_failure, "cache_joined_hosts_for_event failed"), + ), + consumeErrors=True, ) - raise LimitExceededError(msg=e.msg, errcode=e.errcode, retry_after_ms=0) + ).addErrback(unwrapFirstError) return result @@ -2012,26 +2013,39 @@ class EventCreationHandler: for user_id in members: requester = create_requester(user_id, authenticated_entity=self.server_name) try: - event, context = await self.create_event( - requester, - { - "type": EventTypes.Dummy, - "content": {}, - "room_id": room_id, - "sender": user_id, - }, - ) + # Try several times, it could fail with PartialStateConflictError + # in handle_new_client_event, cf comment in except block. + max_retries = 5 + for i in range(max_retries): + try: + event, context = await self.create_event( + requester, + { + "type": EventTypes.Dummy, + "content": {}, + "room_id": room_id, + "sender": user_id, + }, + ) - event.internal_metadata.proactively_send = False + event.internal_metadata.proactively_send = False - # Since this is a dummy-event it is OK if it is sent by a - # shadow-banned user. - await self.handle_new_client_event( - requester, - events_and_context=[(event, context)], - ratelimit=False, - ignore_shadow_ban=True, - ) + # Since this is a dummy-event it is OK if it is sent by a + # shadow-banned user. + await self.handle_new_client_event( + requester, + events_and_context=[(event, context)], + ratelimit=False, + ignore_shadow_ban=True, + ) + + break + except PartialStateConflictError as e: + # Persisting couldn't happen because the room got un-partial stated + # in the meantime and context needs to be recomputed, so let's do so. + if i == max_retries - 1: + raise e + pass return True except AuthError: logger.info( diff --git a/synapse/handlers/room.py b/synapse/handlers/room.py index f81241c2b3..572c7b4db3 100644 --- a/synapse/handlers/room.py +++ b/synapse/handlers/room.py @@ -62,6 +62,7 @@ from synapse.events.utils import copy_and_fixup_power_levels_contents from synapse.handlers.relations import BundledAggregations from synapse.module_api import NOT_SPAM from synapse.rest.admin._base import assert_user_is_admin +from synapse.storage.databases.main.events import PartialStateConflictError from synapse.streams import EventSource from synapse.types import ( JsonDict, @@ -207,46 +208,64 @@ class RoomCreationHandler: new_room_id = self._generate_room_id() - # Check whether the user has the power level to carry out the upgrade. 
- # `check_auth_rules_from_context` will check that they are in the room and have - # the required power level to send the tombstone event. - ( - tombstone_event, - tombstone_context, - ) = await self.event_creation_handler.create_event( - requester, - { - "type": EventTypes.Tombstone, - "state_key": "", - "room_id": old_room_id, - "sender": user_id, - "content": { - "body": "This room has been replaced", - "replacement_room": new_room_id, - }, - }, - ) - validate_event_for_room_version(tombstone_event) - await self._event_auth_handler.check_auth_rules_from_context(tombstone_event) + # Try several times, it could fail with PartialStateConflictError + # in _upgrade_room, cf comment in except block. + max_retries = 5 + for i in range(max_retries): + try: + # Check whether the user has the power level to carry out the upgrade. + # `check_auth_rules_from_context` will check that they are in the room and have + # the required power level to send the tombstone event. + ( + tombstone_event, + tombstone_context, + ) = await self.event_creation_handler.create_event( + requester, + { + "type": EventTypes.Tombstone, + "state_key": "", + "room_id": old_room_id, + "sender": user_id, + "content": { + "body": "This room has been replaced", + "replacement_room": new_room_id, + }, + }, + ) + validate_event_for_room_version(tombstone_event) + await self._event_auth_handler.check_auth_rules_from_context( + tombstone_event + ) - # Upgrade the room - # - # If this user has sent multiple upgrade requests for the same room - # and one of them is not complete yet, cache the response and - # return it to all subsequent requests - ret = await self._upgrade_response_cache.wrap( - (old_room_id, user_id), - self._upgrade_room, - requester, - old_room_id, - old_room, # args for _upgrade_room - new_room_id, - new_version, - tombstone_event, - tombstone_context, - ) + # Upgrade the room + # + # If this user has sent multiple upgrade requests for the same room + # and one of them is not complete yet, cache the response and + # return it to all subsequent requests + ret = await self._upgrade_response_cache.wrap( + (old_room_id, user_id), + self._upgrade_room, + requester, + old_room_id, + old_room, # args for _upgrade_room + new_room_id, + new_version, + tombstone_event, + tombstone_context, + ) - return ret + return ret + except PartialStateConflictError as e: + # Clean up the cache so we can retry properly + self._upgrade_response_cache.unset((old_room_id, user_id)) + # Persisting couldn't happen because the room got un-partial stated + # in the meantime and context needs to be recomputed, so let's do so. + if i == max_retries - 1: + raise e + pass + + # This is to satisfy mypy and should never happen + raise PartialStateConflictError() async def _upgrade_room( self, diff --git a/synapse/handlers/room_batch.py b/synapse/handlers/room_batch.py index 411a6fb22f..c73d2adaad 100644 --- a/synapse/handlers/room_batch.py +++ b/synapse/handlers/room_batch.py @@ -375,6 +375,8 @@ class RoomBatchHandler: # Events are sorted by (topological_ordering, stream_ordering) # where topological_ordering is just depth. 
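A reminder of what that sort key means: tuples compare element-wise, so stream_ordering only breaks ties between events at the same depth. A plain-tuple illustration:

    # (topological_ordering, stream_ordering) pairs standing in for events.
    events = [(2, 5), (1, 9), (2, 3)]
    events.sort()
    assert events == [(1, 9), (2, 3), (2, 5)]  # depth first; stream breaks the tie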
for (event, context) in reversed(events_to_persist): + # This call can't raise `PartialStateConflictError` since we forbid + # use of the historical batch API during partial state await self.event_creation_handler.handle_new_client_event( await self.create_requester_for_user_id_from_app_service( event.sender, app_service_requester.app_service diff --git a/synapse/handlers/room_member.py b/synapse/handlers/room_member.py index 0c39e852a1..d236cc09b5 100644 --- a/synapse/handlers/room_member.py +++ b/synapse/handlers/room_member.py @@ -34,6 +34,7 @@ from synapse.events.snapshot import EventContext from synapse.handlers.profile import MAX_AVATAR_URL_LEN, MAX_DISPLAYNAME_LEN from synapse.logging import opentracing from synapse.module_api import NOT_SPAM +from synapse.storage.databases.main.events import PartialStateConflictError from synapse.types import ( JsonDict, Requester, @@ -392,60 +393,81 @@ class RoomMemberHandler(metaclass=abc.ABCMeta): event_pos = await self.store.get_position_for_event(existing_event_id) return existing_event_id, event_pos.stream - event, context = await self.event_creation_handler.create_event( - requester, - { - "type": EventTypes.Member, - "content": content, - "room_id": room_id, - "sender": requester.user.to_string(), - "state_key": user_id, - # For backwards compatibility: - "membership": membership, - "origin_server_ts": origin_server_ts, - }, - txn_id=txn_id, - allow_no_prev_events=allow_no_prev_events, - prev_event_ids=prev_event_ids, - state_event_ids=state_event_ids, - depth=depth, - require_consent=require_consent, - outlier=outlier, - historical=historical, - ) - - prev_state_ids = await context.get_prev_state_ids( - StateFilter.from_types([(EventTypes.Member, None)]) - ) + # Try several times, it could fail with PartialStateConflictError, + # in handle_new_client_event, cf comment in except block. + max_retries = 5 + for i in range(max_retries): + try: + event, context = await self.event_creation_handler.create_event( + requester, + { + "type": EventTypes.Member, + "content": content, + "room_id": room_id, + "sender": requester.user.to_string(), + "state_key": user_id, + # For backwards compatibility: + "membership": membership, + "origin_server_ts": origin_server_ts, + }, + txn_id=txn_id, + allow_no_prev_events=allow_no_prev_events, + prev_event_ids=prev_event_ids, + state_event_ids=state_event_ids, + depth=depth, + require_consent=require_consent, + outlier=outlier, + historical=historical, + ) - prev_member_event_id = prev_state_ids.get((EventTypes.Member, user_id), None) + prev_state_ids = await context.get_prev_state_ids( + StateFilter.from_types([(EventTypes.Member, None)]) + ) - if event.membership == Membership.JOIN: - newly_joined = True - if prev_member_event_id: - prev_member_event = await self.store.get_event(prev_member_event_id) - newly_joined = prev_member_event.membership != Membership.JOIN - - # Only rate-limit if the user actually joined the room, otherwise we'll end - # up blocking profile updates. 
- if newly_joined and ratelimit: - await self._join_rate_limiter_local.ratelimit(requester) - await self._join_rate_per_room_limiter.ratelimit( - requester, key=room_id, update=False + prev_member_event_id = prev_state_ids.get( + (EventTypes.Member, user_id), None ) - with opentracing.start_active_span("handle_new_client_event"): - result_event = await self.event_creation_handler.handle_new_client_event( - requester, - events_and_context=[(event, context)], - extra_users=[target], - ratelimit=ratelimit, - ) - if event.membership == Membership.LEAVE: - if prev_member_event_id: - prev_member_event = await self.store.get_event(prev_member_event_id) - if prev_member_event.membership == Membership.JOIN: - await self._user_left_room(target, room_id) + if event.membership == Membership.JOIN: + newly_joined = True + if prev_member_event_id: + prev_member_event = await self.store.get_event( + prev_member_event_id + ) + newly_joined = prev_member_event.membership != Membership.JOIN + + # Only rate-limit if the user actually joined the room, otherwise we'll end + # up blocking profile updates. + if newly_joined and ratelimit: + await self._join_rate_limiter_local.ratelimit(requester) + await self._join_rate_per_room_limiter.ratelimit( + requester, key=room_id, update=False + ) + with opentracing.start_active_span("handle_new_client_event"): + result_event = ( + await self.event_creation_handler.handle_new_client_event( + requester, + events_and_context=[(event, context)], + extra_users=[target], + ratelimit=ratelimit, + ) + ) + + if event.membership == Membership.LEAVE: + if prev_member_event_id: + prev_member_event = await self.store.get_event( + prev_member_event_id + ) + if prev_member_event.membership == Membership.JOIN: + await self._user_left_room(target, room_id) + + break + except PartialStateConflictError as e: + # Persisting couldn't happen because the room got un-partial stated + # in the meantime and context needs to be recomputed, so let's do so. + if i == max_retries - 1: + raise e + pass # we know it was persisted, so should have a stream ordering assert result_event.internal_metadata.stream_ordering @@ -1234,6 +1256,8 @@ class RoomMemberHandler(metaclass=abc.ABCMeta): ratelimit: Whether to rate limit this request. Raises: SynapseError if there was a problem changing the membership. + PartialStateConflictError: if attempting to persist a partial state event in + a room that has been un-partial stated. """ target_user = UserID.from_string(event.state_key) room_id = event.room_id @@ -1863,21 +1887,37 @@ class RoomMemberMasterHandler(RoomMemberHandler): list(previous_membership_event.auth_event_ids()) + prev_event_ids ) - event, context = await self.event_creation_handler.create_event( - requester, - event_dict, - txn_id=txn_id, - prev_event_ids=prev_event_ids, - auth_event_ids=auth_event_ids, - outlier=True, - ) - event.internal_metadata.out_of_band_membership = True + # Try several times, it could fail with PartialStateConflictError + # in handle_new_client_event, cf comment in except block. 
+ max_retries = 5 + for i in range(max_retries): + try: + event, context = await self.event_creation_handler.create_event( + requester, + event_dict, + txn_id=txn_id, + prev_event_ids=prev_event_ids, + auth_event_ids=auth_event_ids, + outlier=True, + ) + event.internal_metadata.out_of_band_membership = True + + result_event = ( + await self.event_creation_handler.handle_new_client_event( + requester, + events_and_context=[(event, context)], + extra_users=[UserID.from_string(target_user)], + ) + ) + + break + except PartialStateConflictError as e: + # Persisting couldn't happen because the room got un-partial stated + # in the meantime and context needs to be recomputed, so let's do so. + if i == max_retries - 1: + raise e + pass - result_event = await self.event_creation_handler.handle_new_client_event( - requester, - events_and_context=[(event, context)], - extra_users=[UserID.from_string(target_user)], - ) # we know it was persisted, so must have a stream ordering assert result_event.internal_metadata.stream_ordering diff --git a/synapse/handlers/room_summary.py b/synapse/handlers/room_summary.py index 8d08625237..c6b869c6f4 100644 --- a/synapse/handlers/room_summary.py +++ b/synapse/handlers/room_summary.py @@ -20,7 +20,6 @@ from typing import TYPE_CHECKING, Dict, Iterable, List, Optional, Sequence, Set, import attr from synapse.api.constants import ( - EventContentFields, EventTypes, HistoryVisibility, JoinRules, @@ -701,13 +700,6 @@ class RoomSummaryHandler: # there should always be an entry assert stats is not None, "unable to retrieve stats for %s" % (room_id,) - current_state_ids = await self._storage_controllers.state.get_current_state_ids( - room_id - ) - create_event = await self._store.get_event( - current_state_ids[(EventTypes.Create, "")] - ) - entry = { "room_id": stats["room_id"], "name": stats["name"], @@ -720,7 +712,7 @@ class RoomSummaryHandler: stats["history_visibility"] == HistoryVisibility.WORLD_READABLE ), "guest_can_join": stats["guest_access"] == "can_join", - "room_type": create_event.content.get(EventContentFields.ROOM_TYPE), + "room_type": stats["room_type"], } if self._msc3266_enabled: @@ -730,7 +722,11 @@ class RoomSummaryHandler: # Federation requests need to provide additional information so the # requested server is able to filter the response appropriately. 
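Since `room_type` is now read straight off the room stats row, the current-state lookup is only needed on this federation-only path, which is why it moves inside the branch. Roughly, under hypothetical names (`state` standing in for the state storage controller):

    async def build_entry(stats, for_federation, state, room_id):
        # No create-event fetch: room_type comes from the stats row.
        entry = {"room_id": stats["room_id"], "room_type": stats["room_type"]}
        if for_federation:
            # Only federation requests pay for the state lookup now.
            current_state_ids = await state.get_current_state_ids(room_id)
            ...  # derive the federation-only fields from current_state_ids
        return entry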
if for_federation: + current_state_ids = ( + await self._storage_controllers.state.get_current_state_ids(room_id) + ) room_version = await self._store.get_room_version(room_id) + if await self._event_auth_handler.has_restricted_join_rules( current_state_ids, room_version ): diff --git a/synapse/replication/tcp/client.py b/synapse/replication/tcp/client.py index b4dad47b45..658d89210d 100644 --- a/synapse/replication/tcp/client.py +++ b/synapse/replication/tcp/client.py @@ -36,6 +36,7 @@ from synapse.replication.tcp.streams import ( TagAccountDataStream, ToDeviceStream, TypingStream, + UnPartialStatedEventStream, UnPartialStatedRoomStream, ) from synapse.replication.tcp.streams.events import ( @@ -43,7 +44,10 @@ from synapse.replication.tcp.streams.events import ( EventsStreamEventRow, EventsStreamRow, ) -from synapse.replication.tcp.streams.partial_state import UnPartialStatedRoomStreamRow +from synapse.replication.tcp.streams.partial_state import ( + UnPartialStatedEventStreamRow, + UnPartialStatedRoomStreamRow, +) from synapse.types import PersistedEventPosition, ReadReceipt, StreamKeyType, UserID from synapse.util.async_helpers import Linearizer, timeout_deferred from synapse.util.metrics import Measure @@ -247,6 +251,14 @@ class ReplicationDataHandler: self._state_storage_controller.notify_room_un_partial_stated( row.room_id ) + elif stream_name == UnPartialStatedEventStream.NAME: + for row in rows: + assert isinstance(row, UnPartialStatedEventStreamRow) + + # Wake up any tasks waiting for the event to be un-partial-stated. + self._state_storage_controller.notify_event_un_partial_stated( + row.event_id + ) await self._presence_handler.process_replication_rows( stream_name, instance_name, token, rows diff --git a/synapse/replication/tcp/streams/__init__.py b/synapse/replication/tcp/streams/__init__.py index 8575666d9c..110f10aab9 100644 --- a/synapse/replication/tcp/streams/__init__.py +++ b/synapse/replication/tcp/streams/__init__.py @@ -42,7 +42,10 @@ from synapse.replication.tcp.streams._base import ( ) from synapse.replication.tcp.streams.events import EventsStream from synapse.replication.tcp.streams.federation import FederationStream -from synapse.replication.tcp.streams.partial_state import UnPartialStatedRoomStream +from synapse.replication.tcp.streams.partial_state import ( + UnPartialStatedEventStream, + UnPartialStatedRoomStream, +) STREAMS_MAP = { stream.NAME: stream @@ -63,6 +66,7 @@ STREAMS_MAP = { AccountDataStream, UserSignatureStream, UnPartialStatedRoomStream, + UnPartialStatedEventStream, ) } @@ -83,4 +87,5 @@ __all__ = [ "AccountDataStream", "UserSignatureStream", "UnPartialStatedRoomStream", + "UnPartialStatedEventStream", ] diff --git a/synapse/replication/tcp/streams/partial_state.py b/synapse/replication/tcp/streams/partial_state.py index 18f087ffa2..b5a2ae74b6 100644 --- a/synapse/replication/tcp/streams/partial_state.py +++ b/synapse/replication/tcp/streams/partial_state.py @@ -46,3 +46,31 @@ class UnPartialStatedRoomStream(Stream): current_token_without_instance(store.get_un_partial_stated_rooms_token), store.get_un_partial_stated_rooms_from_stream, ) + + +@attr.s(slots=True, frozen=True, auto_attribs=True) +class UnPartialStatedEventStreamRow: + # ID of the event that has been un-partial-stated. + event_id: str + + # True iff the rejection status of the event changed as a result of being + # un-partial-stated. + rejection_status_changed: bool + + +class UnPartialStatedEventStream(Stream): + """ + Stream to notify about events becoming un-partial-stated. 
+ """ + + NAME = "un_partial_stated_event" + ROW_TYPE = UnPartialStatedEventStreamRow + + def __init__(self, hs: "HomeServer"): + store = hs.get_datastores().main + super().__init__( + hs.get_instance_name(), + # TODO(faster_joins, multiple writers): we need to account for instance names + current_token_without_instance(store.get_un_partial_stated_events_token), + store.get_un_partial_stated_events_from_stream, + ) diff --git a/synapse/res/templates/recaptcha.html b/synapse/res/templates/recaptcha.html index 8204928cdf..f00992a24b 100644 --- a/synapse/res/templates/recaptcha.html +++ b/synapse/res/templates/recaptcha.html @@ -3,11 +3,10 @@ {% block header %} <script src="https://www.recaptcha.net/recaptcha/api.js" async defer></script> -<script src="//code.jquery.com/jquery-1.11.2.min.js"></script> <link rel="stylesheet" href="/_matrix/static/client/register/style.css"> <script> function captchaDone() { - $('#registrationForm').submit(); + document.getElementById('registrationForm').submit(); } </script> {% endblock %} diff --git a/synapse/state/__init__.py b/synapse/state/__init__.py index ee5469d5a8..fdfb46ab82 100644 --- a/synapse/state/__init__.py +++ b/synapse/state/__init__.py @@ -202,14 +202,20 @@ class StateHandler: room_id: the room_id containing the given events. event_ids: the events whose state should be fetched and resolved. await_full_state: if `True`, will block if we do not yet have complete state - at the given `event_id`s, regardless of whether `state_filter` is - satisfied by partial state. + at these events and `state_filter` is not satisfied by partial state. + Defaults to `True`. Returns: the state dict (a mapping from (event_type, state_key) -> event_id) which holds the resolution of the states after the given event IDs. """ logger.debug("calling resolve_state_groups from compute_state_after_events") + if ( + await_full_state + and state_filter + and not state_filter.must_await_full_state(self.hs.is_mine_id) + ): + await_full_state = False ret = await self.resolve_state_groups_for_events( room_id, event_ids, await_full_state ) diff --git a/synapse/storage/databases/main/event_push_actions.py b/synapse/storage/databases/main/event_push_actions.py index 7ebe34f773..3a0c370fde 100644 --- a/synapse/storage/databases/main/event_push_actions.py +++ b/synapse/storage/databases/main/event_push_actions.py @@ -275,15 +275,6 @@ class EventPushActionsWorkerStore(ReceiptsWorkerStore, StreamWorkerStore, SQLBas ) self.db_pool.updates.register_background_index_update( - "event_push_summary_unique_index", - index_name="event_push_summary_unique_index", - table="event_push_summary", - columns=["user_id", "room_id"], - unique=True, - replaces_index="event_push_summary_user_rm", - ) - - self.db_pool.updates.register_background_index_update( "event_push_summary_unique_index2", index_name="event_push_summary_unique_index2", table="event_push_summary", diff --git a/synapse/storage/databases/main/events_worker.py b/synapse/storage/databases/main/events_worker.py index 318fd7dc71..761b15a815 100644 --- a/synapse/storage/databases/main/events_worker.py +++ b/synapse/storage/databases/main/events_worker.py @@ -59,8 +59,9 @@ from synapse.metrics.background_process_metrics import ( run_as_background_process, wrap_as_background_process, ) -from synapse.replication.tcp.streams import BackfillStream +from synapse.replication.tcp.streams import BackfillStream, UnPartialStatedEventStream from synapse.replication.tcp.streams.events import EventsStream +from 
synapse.replication.tcp.streams.partial_state import UnPartialStatedEventStreamRow from synapse.storage._base import SQLBaseStore, db_to_json, make_in_list_sql_clause from synapse.storage.database import ( DatabasePool, @@ -70,6 +71,7 @@ from synapse.storage.engines import PostgresEngine from synapse.storage.types import Cursor from synapse.storage.util.id_generators import ( + AbstractStreamIdGenerator, AbstractStreamIdTracker, MultiWriterIdGenerator, StreamIdGenerator, @@ -292,6 +294,93 @@ class EventsWorkerStore(SQLBaseStore): id_column="chain_id", ) + self._un_partial_stated_events_stream_id_gen: AbstractStreamIdGenerator + + if isinstance(database.engine, PostgresEngine): + self._un_partial_stated_events_stream_id_gen = MultiWriterIdGenerator( + db_conn=db_conn, + db=database, + stream_name="un_partial_stated_event_stream", + instance_name=hs.get_instance_name(), + tables=[ + ("un_partial_stated_event_stream", "instance_name", "stream_id") + ], + sequence_name="un_partial_stated_event_stream_sequence", + # TODO(faster_joins, multiple writers) Support multiple writers. + writers=["master"], + ) + else: + self._un_partial_stated_events_stream_id_gen = StreamIdGenerator( + db_conn, "un_partial_stated_event_stream", "stream_id" + ) + + def get_un_partial_stated_events_token(self) -> int: + # TODO(faster_joins, multiple writers): This is inappropriate if there are multiple + # writers because workers that don't write often will hold all + # readers up. + return self._un_partial_stated_events_stream_id_gen.get_current_token() + + async def get_un_partial_stated_events_from_stream( + self, instance_name: str, last_id: int, current_id: int, limit: int + ) -> Tuple[List[Tuple[int, Tuple[str, bool]]], int, bool]: + """Get updates for the un-partial-stated events replication stream. + + Args: + instance_name: The writer we want to fetch updates from. Unused + here since there is only ever one writer. + last_id: The token to fetch updates from. Exclusive. + current_id: The token to fetch updates up to. Inclusive. + limit: The requested limit for the number of rows to return. The + function may return more or fewer rows. + + Returns: + A tuple consisting of: the updates, a token to use to fetch + subsequent updates, and whether we returned fewer rows than exist + between the requested tokens due to the limit. + + The token returned can be used in a subsequent call to this + function to get further updates. + + The updates are a list of 2-tuples of stream ID and the row data. + """ + + if last_id == current_id: + return [], current_id, False + + def get_un_partial_stated_events_from_stream_txn( + txn: LoggingTransaction, + ) -> Tuple[List[Tuple[int, Tuple[str, bool]]], int, bool]: + sql = """ + SELECT stream_id, event_id, rejection_status_changed + FROM un_partial_stated_event_stream + WHERE ? < stream_id AND stream_id <= ? AND instance_name = ? + ORDER BY stream_id ASC + LIMIT ?
+ """ + txn.execute(sql, (last_id, current_id, instance_name, limit)) + updates = [ + ( + row[0], + ( + row[1], + bool(row[2]), + ), + ) + for row in txn + ] + limited = False + upto_token = current_id + if len(updates) >= limit: + upto_token = updates[-1][0] + limited = True + + return updates, upto_token, limited + + return await self.db_pool.runInteraction( + "get_un_partial_stated_events_from_stream", + get_un_partial_stated_events_from_stream_txn, + ) + def process_replication_rows( self, stream_name: str, @@ -303,6 +392,16 @@ class EventsWorkerStore(SQLBaseStore): self._stream_id_gen.advance(instance_name, token) elif stream_name == BackfillStream.NAME: self._backfill_id_gen.advance(instance_name, -token) + elif stream_name == UnPartialStatedEventStream.NAME: + for row in rows: + assert isinstance(row, UnPartialStatedEventStreamRow) + + self.is_partial_state_event.invalidate((row.event_id,)) + + if row.rejection_status_changed: + # If the partial-stated event became rejected or unrejected + # when it wasn't before, we need to invalidate this cache. + self._invalidate_local_get_event_cache(row.event_id) super().process_replication_rows(stream_name, instance_name, token, rows) @@ -2292,6 +2391,9 @@ class EventsWorkerStore(SQLBaseStore): This can happen, for example, when resyncing state during a faster join. + It is the caller's responsibility to ensure that other workers are + sent a notification so that they call `_invalidate_local_get_event_cache()`. + Args: txn: event_id: ID of event to update @@ -2330,14 +2432,3 @@ class EventsWorkerStore(SQLBaseStore): ) self.invalidate_get_event_cache_after_txn(txn, event_id) - - # TODO(faster_joins): invalidate the cache on workers. Ideally we'd just - # call '_send_invalidation_to_replication', but we actually need the other - # end to call _invalidate_local_get_event_cache() rather than (just) - # _get_event_cache.invalidate(). - # - # One solution might be to (somehow) get the workers to call - # _invalidate_caches_for_event() (though that will invalidate more than - # strictly necessary). - # - # https://github.com/matrix-org/synapse/issues/12994 diff --git a/synapse/storage/databases/main/state.py b/synapse/storage/databases/main/state.py index c801a93b5b..f32cbb2dec 100644 --- a/synapse/storage/databases/main/state.py +++ b/synapse/storage/databases/main/state.py @@ -14,7 +14,7 @@ # limitations under the License. 
import collections.abc import logging -from typing import TYPE_CHECKING, Collection, Dict, Iterable, Optional, Set, Tuple +from typing import TYPE_CHECKING, Any, Collection, Dict, Iterable, Optional, Set, Tuple import attr @@ -24,6 +24,8 @@ from synapse.api.room_versions import KNOWN_ROOM_VERSIONS, RoomVersion from synapse.events import EventBase from synapse.events.snapshot import EventContext from synapse.logging.opentracing import trace +from synapse.replication.tcp.streams import UnPartialStatedEventStream +from synapse.replication.tcp.streams.partial_state import UnPartialStatedEventStreamRow from synapse.storage._base import SQLBaseStore from synapse.storage.database import ( DatabasePool, @@ -80,6 +82,21 @@ class StateGroupWorkerStore(EventsWorkerStore, SQLBaseStore): hs: "HomeServer", ): super().__init__(database, db_conn, hs) + self._instance_name: str = hs.get_instance_name() + + def process_replication_rows( + self, + stream_name: str, + instance_name: str, + token: int, + rows: Iterable[Any], + ) -> None: + if stream_name == UnPartialStatedEventStream.NAME: + for row in rows: + assert isinstance(row, UnPartialStatedEventStreamRow) + self._get_state_group_for_event.invalidate((row.event_id,)) + + super().process_replication_rows(stream_name, instance_name, token, rows) async def get_room_version(self, room_id: str) -> RoomVersion: """Get the room_version of a given room @@ -404,18 +421,21 @@ class StateGroupWorkerStore(EventsWorkerStore, SQLBaseStore): context: EventContext, ) -> None: """Update the state group for a partial state event""" - await self.db_pool.runInteraction( - "update_state_for_partial_state_event", - self._update_state_for_partial_state_event_txn, - event, - context, - ) + async with self._un_partial_stated_events_stream_id_gen.get_next() as un_partial_state_event_stream_id: + await self.db_pool.runInteraction( + "update_state_for_partial_state_event", + self._update_state_for_partial_state_event_txn, + event, + context, + un_partial_state_event_stream_id, + ) def _update_state_for_partial_state_event_txn( self, txn: LoggingTransaction, event: EventBase, context: EventContext, + un_partial_state_event_stream_id: int, ) -> None: # we shouldn't have any outliers here assert not event.internal_metadata.is_outlier() @@ -436,7 +456,10 @@ class StateGroupWorkerStore(EventsWorkerStore, SQLBaseStore): # the event may now be rejected where it was not before, or vice versa, # in which case we need to update the rejected flags. 
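Tracking whether the rejected flag flipped is what lets workers scale their cache invalidation (see the `process_replication_rows` additions to `EventsWorkerStore` above): every row invalidates the cheap `is_partial_state_event` cache, but only a rejection change forces dropping the cached event object itself. A sketch of the worker-side handling, with `row` and `caches` as stand-ins:

    def on_un_partial_stated_event_row(row, caches):
        # The event is no longer partial-state, whatever else changed.
        caches.is_partial_state_event.invalidate((row.event_id,))
        if row.rejection_status_changed:
            # The cached event entry carries the rejected flag, so it is
            # stale and must be evicted, not merely re-checked.
            caches.invalidate_local_get_event_cache(row.event_id)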
- if bool(context.rejected) != (event.rejected_reason is not None): + rejection_status_changed = bool(context.rejected) != ( + event.rejected_reason is not None + ) + if rejection_status_changed: self.mark_event_rejected_txn(txn, event.event_id, context.rejected) self.db_pool.simple_delete_one_txn( @@ -445,8 +468,6 @@ class StateGroupWorkerStore(EventsWorkerStore, SQLBaseStore): keyvalues={"event_id": event.event_id}, ) - # TODO(faster_joins): need to do something about workers here - # https://github.com/matrix-org/synapse/issues/12994 txn.call_after(self.is_partial_state_event.invalidate, (event.event_id,)) txn.call_after( self._get_state_group_for_event.prefill, @@ -454,6 +475,17 @@ class StateGroupWorkerStore(EventsWorkerStore, SQLBaseStore): state_group, ) + self.db_pool.simple_insert_txn( + txn, + "un_partial_stated_event_stream", + { + "stream_id": un_partial_state_event_stream_id, + "instance_name": self._instance_name, + "event_id": event.event_id, + "rejection_status_changed": rejection_status_changed, + }, + ) + class MainStateBackgroundUpdateStore(RoomMemberWorkerStore): diff --git a/synapse/storage/schema/main/delta/73/22_un_partial_stated_event_stream.sql b/synapse/storage/schema/main/delta/73/22_un_partial_stated_event_stream.sql new file mode 100644 index 0000000000..0e571f78c3 --- /dev/null +++ b/synapse/storage/schema/main/delta/73/22_un_partial_stated_event_stream.sql @@ -0,0 +1,34 @@ +/* Copyright 2022 The Matrix.org Foundation C.I.C + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +-- Stream for notifying that an event has become un-partial-stated. +CREATE TABLE un_partial_stated_event_stream( + -- Position in the stream + stream_id BIGINT PRIMARY KEY NOT NULL, + + -- Which instance wrote this entry. + instance_name TEXT NOT NULL, + + -- Which event has been un-partial-stated. + event_id TEXT NOT NULL REFERENCES events(event_id) ON DELETE CASCADE, + + -- true iff the `rejected` status of the event changed when it became + -- un-partial-stated. + rejection_status_changed BOOLEAN NOT NULL +); + +-- We want an index here because of the foreign key constraint: +-- upon deleting an event, the database needs to be able to check here. +CREATE UNIQUE INDEX un_partial_stated_event_stream_room_id ON un_partial_stated_event_stream (event_id); diff --git a/synapse/storage/schema/main/delta/73/23_fix_thread_index.sql b/synapse/storage/schema/main/delta/73/23_fix_thread_index.sql new file mode 100644 index 0000000000..ec519ceebf --- /dev/null +++ b/synapse/storage/schema/main/delta/73/23_fix_thread_index.sql @@ -0,0 +1,33 @@ +/* Copyright 2022 The Matrix.org Foundation C.I.C + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +-- If a Synapse deployment made a large jump in versions (from < 1.62.0 to >= 1.70.0) +-- in a single upgrade then it might be possible for the event_push_summary_unique_index +-- to be created in the background from delta 71/02event_push_summary_unique.sql after +-- delta 73/06thread_notifications_thread_id_idx.sql is executed, causing it to +-- not drop the event_push_summary_unique_index index. +-- +-- See https://github.com/matrix-org/synapse/issues/14641 + +-- Stop the index from being scheduled for creation in the background. +DELETE FROM background_updates WHERE update_name = 'event_push_summary_unique_index'; + +-- The above background job also replaces another index, so ensure that side-effect +-- is applied. +DROP INDEX IF EXISTS event_push_summary_user_rm; + +-- Fix deployments which ran the 73/06thread_notifications_thread_id_idx.sql delta +-- before the event_push_summary_unique_index background job was run. +DROP INDEX IF EXISTS event_push_summary_unique_index; diff --git a/synapse/storage/schema/main/delta/73/23_un_partial_stated_room_stream_seq.sql.postgres b/synapse/storage/schema/main/delta/73/23_un_partial_stated_room_stream_seq.sql.postgres new file mode 100644 index 0000000000..1ec24702f3 --- /dev/null +++ b/synapse/storage/schema/main/delta/73/23_un_partial_stated_room_stream_seq.sql.postgres @@ -0,0 +1,20 @@ +/* Copyright 2022 The Matrix.org Foundation C.I.C + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +CREATE SEQUENCE IF NOT EXISTS un_partial_stated_event_stream_sequence; + +SELECT setval('un_partial_stated_event_stream_sequence', ( + SELECT COALESCE(MAX(stream_id), 1) FROM un_partial_stated_event_stream +)); diff --git a/synapse/util/caches/descriptors.py b/synapse/util/caches/descriptors.py index 72227359b9..81df71a0c5 100644 --- a/synapse/util/caches/descriptors.py +++ b/synapse/util/caches/descriptors.py @@ -53,9 +53,9 @@ F = TypeVar("F", bound=Callable[..., Any]) class CachedFunction(Generic[F]): - invalidate: Any = None - invalidate_all: Any = None - prefill: Any = None + invalidate: Callable[[Tuple[Any, ...]], None] + invalidate_all: Callable[[], None] + prefill: Callable[[Tuple[Any, ...], Any], None] cache: Any = None num_args: Any = None diff --git a/synapse/util/caches/lrucache.py b/synapse/util/caches/lrucache.py index dcf0eac3bf..452d5d04c1 100644 --- a/synapse/util/caches/lrucache.py +++ b/synapse/util/caches/lrucache.py @@ -788,26 +788,21 @@ class LruCache(Generic[KT, VT]): def __contains__(self, key: KT) -> bool: return self.contains(key) - def set_cache_factor(self, factor: float) -> bool: + def set_cache_factor(self, factor: float) -> None: """ Set the cache factor for this individual cache. This will trigger a resize if it changes, which may require evicting items from the cache. - - Returns: - Whether the cache changed size or not. """ if not self.apply_cache_factor_from_config: - return False + return new_size = int(self._original_max_size * factor) if new_size != self.max_size: self.max_size = new_size if self._on_resize: self._on_resize() - return True - return False def __del__(self) -> None: # We're about to be deleted, so we make sure to clear up all the nodes diff --git a/synapse/util/caches/response_cache.py b/synapse/util/caches/response_cache.py index a3eb5f741b..340e5e9145 100644 --- a/synapse/util/caches/response_cache.py +++ b/synapse/util/caches/response_cache.py @@ -167,12 +167,10 @@ class ResponseCache(Generic[KV]): # the should_cache bit, we leave it in the cache for now and schedule # its removal later. if self.timeout_sec and context.should_cache: - self.clock.call_later( - self.timeout_sec, self._result_cache.pop, key, None - ) + self.clock.call_later(self.timeout_sec, self.unset, key) else: # otherwise, remove the result immediately. - self._result_cache.pop(key, None) + self.unset(key) return r # make sure we do this *after* adding the entry to result_cache, @@ -181,6 +179,14 @@ class ResponseCache(Generic[KV]): result.addBoth(on_complete) return entry + def unset(self, key: KV) -> None: + """Remove the cached value for this key from the cache, if any. 
+ + Args: + key: key used to remove the cached value + """ + self._result_cache.pop(key, None) + async def wrap( self, key: KV, diff --git a/tests/config/test___main__.py b/tests/config/test___main__.py index b1c73d3612..cb5d4b05c3 100644 --- a/tests/config/test___main__.py +++ b/tests/config/test___main__.py @@ -17,15 +17,15 @@ from tests.config.utils import ConfigFileTestCase class ConfigMainFileTestCase(ConfigFileTestCase): - def test_executes_without_an_action(self): + def test_executes_without_an_action(self) -> None: self.generate_config() main(["", "-c", self.config_file]) - def test_read__error_if_key_not_found(self): + def test_read__error_if_key_not_found(self) -> None: self.generate_config() with self.assertRaises(SystemExit): main(["", "read", "foo.bar.hello", "-c", self.config_file]) - def test_read__passes_if_key_found(self): + def test_read__passes_if_key_found(self) -> None: self.generate_config() main(["", "read", "server.server_name", "-c", self.config_file]) diff --git a/tests/config/test_background_update.py b/tests/config/test_background_update.py index 0c32c1ca29..e4bad2ba6e 100644 --- a/tests/config/test_background_update.py +++ b/tests/config/test_background_update.py @@ -22,7 +22,7 @@ class BackgroundUpdateConfigTestCase(HomeserverTestCase): # Tests that the default values in the config are correctly loaded. Note that the default # values are loaded when the corresponding config options are commented out, which is why there isn't # a config specified here. - def test_default_configuration(self): + def test_default_configuration(self) -> None: background_updater = BackgroundUpdater( self.hs, self.hs.get_datastores().main.db_pool ) @@ -46,7 +46,7 @@ class BackgroundUpdateConfigTestCase(HomeserverTestCase): """ ) ) - def test_custom_configuration(self): + def test_custom_configuration(self) -> None: background_updater = BackgroundUpdater( self.hs, self.hs.get_datastores().main.db_pool ) diff --git a/tests/config/test_base.py b/tests/config/test_base.py index 6a52f862f4..3fbfe6c1da 100644 --- a/tests/config/test_base.py +++ b/tests/config/test_base.py @@ -24,13 +24,13 @@ from tests import unittest class BaseConfigTestCase(unittest.TestCase): - def setUp(self): + def setUp(self) -> None: # The root object needs a server property with a public_baseurl. root = Mock() root.server.public_baseurl = "http://test" self.config = Config(root) - def test_loading_missing_templates(self): + def test_loading_missing_templates(self) -> None: # Use a temporary directory that exists on the system, but that isn't likely to # contain template files with tempfile.TemporaryDirectory() as tmp_dir: @@ -50,7 +50,7 @@ class BaseConfigTestCase(unittest.TestCase): "Template file did not contain our test string", ) - def test_loading_custom_templates(self): + def test_loading_custom_templates(self) -> None: # Use a temporary directory that exists on the system with tempfile.TemporaryDirectory() as tmp_dir: # Create a temporary bogus template file @@ -79,7 +79,7 @@ class BaseConfigTestCase(unittest.TestCase): "Template file did not contain our test string", ) - def test_multiple_custom_template_directories(self): + def test_multiple_custom_template_directories(self) -> None: """Tests that directories are searched in the right order if multiple custom template directories are provided. 
""" @@ -137,7 +137,7 @@ class BaseConfigTestCase(unittest.TestCase): for td in tempdirs: td.cleanup() - def test_loading_template_from_nonexistent_custom_directory(self): + def test_loading_template_from_nonexistent_custom_directory(self) -> None: with self.assertRaises(ConfigError): self.config.read_templates( ["some_filename.html"], ("a_nonexistent_directory",) diff --git a/tests/config/test_cache.py b/tests/config/test_cache.py index d2b3c299e3..96f66af328 100644 --- a/tests/config/test_cache.py +++ b/tests/config/test_cache.py @@ -13,26 +13,27 @@ # limitations under the License. from synapse.config.cache import CacheConfig, add_resizable_cache +from synapse.types import JsonDict from synapse.util.caches.lrucache import LruCache from tests.unittest import TestCase class CacheConfigTests(TestCase): - def setUp(self): + def setUp(self) -> None: # Reset caches before each test since there's global state involved. self.config = CacheConfig() self.config.reset() - def tearDown(self): + def tearDown(self) -> None: # Also reset the caches after each test to leave state pristine. self.config.reset() - def test_individual_caches_from_environ(self): + def test_individual_caches_from_environ(self) -> None: """ Individual cache factors will be loaded from the environment. """ - config = {} + config: JsonDict = {} self.config._environ = { "SYNAPSE_CACHE_FACTOR_SOMETHING_OR_OTHER": "2", "SYNAPSE_NOT_CACHE": "BLAH", @@ -42,15 +43,15 @@ class CacheConfigTests(TestCase): self.assertEqual(dict(self.config.cache_factors), {"something_or_other": 2.0}) - def test_config_overrides_environ(self): + def test_config_overrides_environ(self) -> None: """ Individual cache factors defined in the environment will take precedence over those in the config. """ - config = {"caches": {"per_cache_factors": {"foo": 2, "bar": 3}}} + config: JsonDict = {"caches": {"per_cache_factors": {"foo": 2, "bar": 3}}} self.config._environ = { "SYNAPSE_CACHE_FACTOR_SOMETHING_OR_OTHER": "2", - "SYNAPSE_CACHE_FACTOR_FOO": 1, + "SYNAPSE_CACHE_FACTOR_FOO": "1", } self.config.read_config(config, config_dir_path="", data_dir_path="") self.config.resize_all_caches() @@ -60,104 +61,104 @@ class CacheConfigTests(TestCase): {"foo": 1.0, "bar": 3.0, "something_or_other": 2.0}, ) - def test_individual_instantiated_before_config_load(self): + def test_individual_instantiated_before_config_load(self) -> None: """ If a cache is instantiated before the config is read, it will be given the default cache size in the interim, and then resized once the config is loaded. """ - cache = LruCache(100) + cache: LruCache = LruCache(100) add_resizable_cache("foo", cache_resize_callback=cache.set_cache_factor) self.assertEqual(cache.max_size, 50) - config = {"caches": {"per_cache_factors": {"foo": 3}}} + config: JsonDict = {"caches": {"per_cache_factors": {"foo": 3}}} self.config.read_config(config) self.config.resize_all_caches() self.assertEqual(cache.max_size, 300) - def test_individual_instantiated_after_config_load(self): + def test_individual_instantiated_after_config_load(self) -> None: """ If a cache is instantiated after the config is read, it will be immediately resized to the correct size given the per_cache_factor if there is one. 
""" - config = {"caches": {"per_cache_factors": {"foo": 2}}} + config: JsonDict = {"caches": {"per_cache_factors": {"foo": 2}}} self.config.read_config(config, config_dir_path="", data_dir_path="") self.config.resize_all_caches() - cache = LruCache(100) + cache: LruCache = LruCache(100) add_resizable_cache("foo", cache_resize_callback=cache.set_cache_factor) self.assertEqual(cache.max_size, 200) - def test_global_instantiated_before_config_load(self): + def test_global_instantiated_before_config_load(self) -> None: """ If a cache is instantiated before the config is read, it will be given the default cache size in the interim, and then resized to the new default cache size once the config is loaded. """ - cache = LruCache(100) + cache: LruCache = LruCache(100) add_resizable_cache("foo", cache_resize_callback=cache.set_cache_factor) self.assertEqual(cache.max_size, 50) - config = {"caches": {"global_factor": 4}} + config: JsonDict = {"caches": {"global_factor": 4}} self.config.read_config(config, config_dir_path="", data_dir_path="") self.config.resize_all_caches() self.assertEqual(cache.max_size, 400) - def test_global_instantiated_after_config_load(self): + def test_global_instantiated_after_config_load(self) -> None: """ If a cache is instantiated after the config is read, it will be immediately resized to the correct size given the global factor if there is no per-cache factor. """ - config = {"caches": {"global_factor": 1.5}} + config: JsonDict = {"caches": {"global_factor": 1.5}} self.config.read_config(config, config_dir_path="", data_dir_path="") self.config.resize_all_caches() - cache = LruCache(100) + cache: LruCache = LruCache(100) add_resizable_cache("foo", cache_resize_callback=cache.set_cache_factor) self.assertEqual(cache.max_size, 150) - def test_cache_with_asterisk_in_name(self): + def test_cache_with_asterisk_in_name(self) -> None: """Some caches have asterisks in their name, test that they are set correctly.""" - config = { + config: JsonDict = { "caches": { "per_cache_factors": {"*cache_a*": 5, "cache_b": 6, "cache_c": 2} } } self.config._environ = { "SYNAPSE_CACHE_FACTOR_CACHE_A": "2", - "SYNAPSE_CACHE_FACTOR_CACHE_B": 3, + "SYNAPSE_CACHE_FACTOR_CACHE_B": "3", } self.config.read_config(config, config_dir_path="", data_dir_path="") self.config.resize_all_caches() - cache_a = LruCache(100) + cache_a: LruCache = LruCache(100) add_resizable_cache("*cache_a*", cache_resize_callback=cache_a.set_cache_factor) self.assertEqual(cache_a.max_size, 200) - cache_b = LruCache(100) + cache_b: LruCache = LruCache(100) add_resizable_cache("*Cache_b*", cache_resize_callback=cache_b.set_cache_factor) self.assertEqual(cache_b.max_size, 300) - cache_c = LruCache(100) + cache_c: LruCache = LruCache(100) add_resizable_cache("*cache_c*", cache_resize_callback=cache_c.set_cache_factor) self.assertEqual(cache_c.max_size, 200) - def test_apply_cache_factor_from_config(self): + def test_apply_cache_factor_from_config(self) -> None: """Caches can disable applying cache factor updates, mainly used by event cache size. 
""" - config = {"caches": {"event_cache_size": "10k"}} + config: JsonDict = {"caches": {"event_cache_size": "10k"}} self.config.read_config(config, config_dir_path="", data_dir_path="") self.config.resize_all_caches() - cache = LruCache( + cache: LruCache = LruCache( max_size=self.config.event_cache_size, apply_cache_factor_from_config=False, ) diff --git a/tests/config/test_database.py b/tests/config/test_database.py index 9eca10bbe9..240277bcc6 100644 --- a/tests/config/test_database.py +++ b/tests/config/test_database.py @@ -20,7 +20,7 @@ from tests import unittest class DatabaseConfigTestCase(unittest.TestCase): - def test_database_configured_correctly(self): + def test_database_configured_correctly(self) -> None: conf = yaml.safe_load( DatabaseConfig().generate_config_section(data_dir_path="/data_dir_path") ) diff --git a/tests/config/test_generate.py b/tests/config/test_generate.py index fdfbb0e38e..3a02366932 100644 --- a/tests/config/test_generate.py +++ b/tests/config/test_generate.py @@ -25,14 +25,14 @@ from tests import unittest class ConfigGenerationTestCase(unittest.TestCase): - def setUp(self): + def setUp(self) -> None: self.dir = tempfile.mkdtemp() self.file = os.path.join(self.dir, "homeserver.yaml") - def tearDown(self): + def tearDown(self) -> None: shutil.rmtree(self.dir) - def test_generate_config_generates_files(self): + def test_generate_config_generates_files(self) -> None: with redirect_stdout(StringIO()): HomeServerConfig.load_or_generate_config( "", @@ -56,7 +56,7 @@ class ConfigGenerationTestCase(unittest.TestCase): os.path.join(os.getcwd(), "homeserver.log"), ) - def assert_log_filename_is(self, log_config_file, expected): + def assert_log_filename_is(self, log_config_file: str, expected: str) -> None: with open(log_config_file) as f: config = f.read() # find the 'filename' line diff --git a/tests/config/test_load.py b/tests/config/test_load.py index 69a4e9413b..fcbe79cc7a 100644 --- a/tests/config/test_load.py +++ b/tests/config/test_load.py @@ -21,14 +21,14 @@ from tests.config.utils import ConfigFileTestCase class ConfigLoadingFileTestCase(ConfigFileTestCase): - def test_load_fails_if_server_name_missing(self): + def test_load_fails_if_server_name_missing(self) -> None: self.generate_config_and_remove_lines_containing("server_name") with self.assertRaises(ConfigError): HomeServerConfig.load_config("", ["-c", self.config_file]) with self.assertRaises(ConfigError): HomeServerConfig.load_or_generate_config("", ["-c", self.config_file]) - def test_generates_and_loads_macaroon_secret_key(self): + def test_generates_and_loads_macaroon_secret_key(self) -> None: self.generate_config() with open(self.config_file) as f: @@ -58,7 +58,7 @@ class ConfigLoadingFileTestCase(ConfigFileTestCase): "was: %r" % (config2.key.macaroon_secret_key,) ) - def test_load_succeeds_if_macaroon_secret_key_missing(self): + def test_load_succeeds_if_macaroon_secret_key_missing(self) -> None: self.generate_config_and_remove_lines_containing("macaroon") config1 = HomeServerConfig.load_config("", ["-c", self.config_file]) config2 = HomeServerConfig.load_config("", ["-c", self.config_file]) @@ -73,7 +73,7 @@ class ConfigLoadingFileTestCase(ConfigFileTestCase): config1.key.macaroon_secret_key, config3.key.macaroon_secret_key ) - def test_disable_registration(self): + def test_disable_registration(self) -> None: self.generate_config() self.add_lines_to_config( ["enable_registration: true", "disable_registration: true"] @@ -93,7 +93,7 @@ class ConfigLoadingFileTestCase(ConfigFileTestCase): assert 
config3 is not None self.assertTrue(config3.registration.enable_registration) - def test_stats_enabled(self): + def test_stats_enabled(self) -> None: self.generate_config_and_remove_lines_containing("enable_metrics") self.add_lines_to_config(["enable_metrics: true"]) @@ -101,7 +101,7 @@ class ConfigLoadingFileTestCase(ConfigFileTestCase): config = HomeServerConfig.load_config("", ["-c", self.config_file]) self.assertFalse(config.metrics.metrics_flags.known_servers) - def test_depreciated_identity_server_flag_throws_error(self): + def test_depreciated_identity_server_flag_throws_error(self) -> None: self.generate_config() # Needed to ensure that actual key/value pair added below don't end up on a line with a comment self.add_lines_to_config([" "]) diff --git a/tests/config/test_ratelimiting.py b/tests/config/test_ratelimiting.py index 1b63e1adfd..f12147eaa0 100644 --- a/tests/config/test_ratelimiting.py +++ b/tests/config/test_ratelimiting.py @@ -18,7 +18,7 @@ from tests.utils import default_config class RatelimitConfigTestCase(TestCase): - def test_parse_rc_federation(self): + def test_parse_rc_federation(self) -> None: config_dict = default_config("test") config_dict["rc_federation"] = { "window_size": 20000, diff --git a/tests/config/test_registration_config.py b/tests/config/test_registration_config.py index 33d7b70e32..f6869d7f06 100644 --- a/tests/config/test_registration_config.py +++ b/tests/config/test_registration_config.py @@ -21,7 +21,7 @@ from tests.utils import default_config class RegistrationConfigTestCase(ConfigFileTestCase): - def test_session_lifetime_must_not_be_exceeded_by_smaller_lifetimes(self): + def test_session_lifetime_must_not_be_exceeded_by_smaller_lifetimes(self) -> None: """ session_lifetime should logically be larger than, or at least as large as, all the different token lifetimes. 
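The invariant this test guards can be sketched as a plain check. A minimal sketch, assuming illustrative millisecond values; the option names mirror Synapse's registration settings but are not taken from this patch:

    # session_lifetime must be at least as large as every token lifetime.
    session_lifetime = 86_400_000  # 1 day, illustrative
    token_lifetimes = {
        "refreshable_access_token_lifetime": 600_000,       # 10 minutes
        "nonrefreshable_access_token_lifetime": 3_600_000,  # 1 hour
    }
    for name, lifetime in token_lifetimes.items():
        assert lifetime <= session_lifetime, f"{name} exceeds session_lifetime"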
@@ -91,7 +91,7 @@ class RegistrationConfigTestCase(ConfigFileTestCase): "", ) - def test_refuse_to_start_if_open_registration_and_no_verification(self): + def test_refuse_to_start_if_open_registration_and_no_verification(self) -> None: self.generate_config() self.add_lines_to_config( [ diff --git a/tests/config/test_room_directory.py b/tests/config/test_room_directory.py index db745815ef..297ab37792 100644 --- a/tests/config/test_room_directory.py +++ b/tests/config/test_room_directory.py @@ -20,7 +20,7 @@ from tests import unittest class RoomDirectoryConfigTestCase(unittest.TestCase): - def test_alias_creation_acl(self): + def test_alias_creation_acl(self) -> None: config = yaml.safe_load( """ alias_creation_rules: @@ -78,7 +78,7 @@ class RoomDirectoryConfigTestCase(unittest.TestCase): ) ) - def test_room_publish_acl(self): + def test_room_publish_acl(self) -> None: config = yaml.safe_load( """ alias_creation_rules: [] diff --git a/tests/config/test_server.py b/tests/config/test_server.py index 1f27a54701..41a3fb0b6d 100644 --- a/tests/config/test_server.py +++ b/tests/config/test_server.py @@ -21,7 +21,7 @@ from tests import unittest class ServerConfigTestCase(unittest.TestCase): - def test_is_threepid_reserved(self): + def test_is_threepid_reserved(self) -> None: user1 = {"medium": "email", "address": "user1@example.com"} user2 = {"medium": "email", "address": "user2@example.com"} user3 = {"medium": "email", "address": "user3@example.com"} @@ -32,7 +32,7 @@ class ServerConfigTestCase(unittest.TestCase): self.assertFalse(is_threepid_reserved(config, user3)) self.assertFalse(is_threepid_reserved(config, user1_msisdn)) - def test_unsecure_listener_no_listeners_open_private_ports_false(self): + def test_unsecure_listener_no_listeners_open_private_ports_false(self) -> None: conf = yaml.safe_load( ServerConfig().generate_config_section( "CONFDIR", "/data_dir_path", "che.org", False, None @@ -52,7 +52,7 @@ class ServerConfigTestCase(unittest.TestCase): self.assertEqual(conf["listeners"], expected_listeners) - def test_unsecure_listener_no_listeners_open_private_ports_true(self): + def test_unsecure_listener_no_listeners_open_private_ports_true(self) -> None: conf = yaml.safe_load( ServerConfig().generate_config_section( "CONFDIR", "/data_dir_path", "che.org", True, None @@ -71,7 +71,7 @@ class ServerConfigTestCase(unittest.TestCase): self.assertEqual(conf["listeners"], expected_listeners) - def test_listeners_set_correctly_open_private_ports_false(self): + def test_listeners_set_correctly_open_private_ports_false(self) -> None: listeners = [ { "port": 8448, @@ -95,7 +95,7 @@ class ServerConfigTestCase(unittest.TestCase): self.assertEqual(conf["listeners"], listeners) - def test_listeners_set_correctly_open_private_ports_true(self): + def test_listeners_set_correctly_open_private_ports_true(self) -> None: listeners = [ { "port": 8448, @@ -131,14 +131,14 @@ class ServerConfigTestCase(unittest.TestCase): class GenerateIpSetTestCase(unittest.TestCase): - def test_empty(self): + def test_empty(self) -> None: ip_set = generate_ip_set(()) self.assertFalse(ip_set) ip_set = generate_ip_set((), ()) self.assertFalse(ip_set) - def test_generate(self): + def test_generate(self) -> None: """Check adding IPv4 and IPv6 addresses.""" # IPv4 address ip_set = generate_ip_set(("1.2.3.4",)) @@ -160,7 +160,7 @@ class GenerateIpSetTestCase(unittest.TestCase): ip_set = generate_ip_set(("1.2.3.4", "::1.2.3.4")) self.assertEqual(len(ip_set.iter_cidrs()), 4) - def test_extra(self): + def test_extra(self) -> None: 
"""Extra IP addresses are treated the same.""" ip_set = generate_ip_set((), ("1.2.3.4",)) self.assertEqual(len(ip_set.iter_cidrs()), 4) @@ -172,7 +172,7 @@ class GenerateIpSetTestCase(unittest.TestCase): ip_set = generate_ip_set(("1.2.3.4",), ("1.2.3.4",)) self.assertEqual(len(ip_set.iter_cidrs()), 4) - def test_bad_value(self): + def test_bad_value(self) -> None: """An error should be raised if a bad value is passed in.""" with self.assertRaises(ConfigError): generate_ip_set(("not-an-ip",)) diff --git a/tests/config/test_tls.py b/tests/config/test_tls.py index 9ba5781573..7510fc4643 100644 --- a/tests/config/test_tls.py +++ b/tests/config/test_tls.py @@ -13,13 +13,20 @@ # See the License for the specific language governing permissions and # limitations under the License. +from typing import cast + import idna from OpenSSL import SSL from synapse.config._base import Config, RootConfig +from synapse.config.homeserver import HomeServerConfig from synapse.config.tls import ConfigError, TlsConfig -from synapse.crypto.context_factory import FederationPolicyForHTTPS +from synapse.crypto.context_factory import ( + FederationPolicyForHTTPS, + SSLClientConnectionCreator, +) +from synapse.types import JsonDict from tests.unittest import TestCase @@ -27,7 +34,7 @@ from tests.unittest import TestCase class FakeServer(Config): section = "server" - def has_tls_listener(self): + def has_tls_listener(self) -> bool: return False @@ -36,21 +43,21 @@ class TestConfig(RootConfig): class TLSConfigTests(TestCase): - def test_tls_client_minimum_default(self): + def test_tls_client_minimum_default(self) -> None: """ The default client TLS version is 1.0. """ - config = {} + config: JsonDict = {} t = TestConfig() t.tls.read_config(config, config_dir_path="", data_dir_path="") self.assertEqual(t.tls.federation_client_minimum_tls_version, "1") - def test_tls_client_minimum_set(self): + def test_tls_client_minimum_set(self) -> None: """ The default client TLS version can be set to 1.0, 1.1, and 1.2. """ - config = {"federation_client_minimum_tls_version": 1} + config: JsonDict = {"federation_client_minimum_tls_version": 1} t = TestConfig() t.tls.read_config(config, config_dir_path="", data_dir_path="") self.assertEqual(t.tls.federation_client_minimum_tls_version, "1") @@ -76,7 +83,7 @@ class TLSConfigTests(TestCase): t.tls.read_config(config, config_dir_path="", data_dir_path="") self.assertEqual(t.tls.federation_client_minimum_tls_version, "1.2") - def test_tls_client_minimum_1_point_3_missing(self): + def test_tls_client_minimum_1_point_3_missing(self) -> None: """ If TLS 1.3 support is missing and it's configured, it will raise a ConfigError. @@ -88,7 +95,7 @@ class TLSConfigTests(TestCase): self.addCleanup(setattr, SSL, "SSL.OP_NO_TLSv1_3", OP_NO_TLSv1_3) assert not hasattr(SSL, "OP_NO_TLSv1_3") - config = {"federation_client_minimum_tls_version": 1.3} + config: JsonDict = {"federation_client_minimum_tls_version": 1.3} t = TestConfig() with self.assertRaises(ConfigError) as e: t.tls.read_config(config, config_dir_path="", data_dir_path="") @@ -100,7 +107,7 @@ class TLSConfigTests(TestCase): ), ) - def test_tls_client_minimum_1_point_3_exists(self): + def test_tls_client_minimum_1_point_3_exists(self) -> None: """ If TLS 1.3 support exists and it's configured, it will be settable. 
""" @@ -110,20 +117,20 @@ class TLSConfigTests(TestCase): self.addCleanup(lambda: delattr(SSL, "OP_NO_TLSv1_3")) assert hasattr(SSL, "OP_NO_TLSv1_3") - config = {"federation_client_minimum_tls_version": 1.3} + config: JsonDict = {"federation_client_minimum_tls_version": 1.3} t = TestConfig() t.tls.read_config(config, config_dir_path="", data_dir_path="") self.assertEqual(t.tls.federation_client_minimum_tls_version, "1.3") - def test_tls_client_minimum_set_passed_through_1_2(self): + def test_tls_client_minimum_set_passed_through_1_2(self) -> None: """ The configured TLS version is correctly configured by the ContextFactory. """ - config = {"federation_client_minimum_tls_version": 1.2} + config: JsonDict = {"federation_client_minimum_tls_version": 1.2} t = TestConfig() t.tls.read_config(config, config_dir_path="", data_dir_path="") - cf = FederationPolicyForHTTPS(t) + cf = FederationPolicyForHTTPS(cast(HomeServerConfig, t)) options = _get_ssl_context_options(cf._verify_ssl_context) # The context has had NO_TLSv1_1 and NO_TLSv1_0 set, but not NO_TLSv1_2 @@ -131,15 +138,15 @@ class TLSConfigTests(TestCase): self.assertNotEqual(options & SSL.OP_NO_TLSv1_1, 0) self.assertEqual(options & SSL.OP_NO_TLSv1_2, 0) - def test_tls_client_minimum_set_passed_through_1_0(self): + def test_tls_client_minimum_set_passed_through_1_0(self) -> None: """ The configured TLS version is correctly configured by the ContextFactory. """ - config = {"federation_client_minimum_tls_version": 1} + config: JsonDict = {"federation_client_minimum_tls_version": 1} t = TestConfig() t.tls.read_config(config, config_dir_path="", data_dir_path="") - cf = FederationPolicyForHTTPS(t) + cf = FederationPolicyForHTTPS(cast(HomeServerConfig, t)) options = _get_ssl_context_options(cf._verify_ssl_context) # The context has not had any of the NO_TLS set. @@ -147,11 +154,11 @@ class TLSConfigTests(TestCase): self.assertEqual(options & SSL.OP_NO_TLSv1_1, 0) self.assertEqual(options & SSL.OP_NO_TLSv1_2, 0) - def test_whitelist_idna_failure(self): + def test_whitelist_idna_failure(self) -> None: """ The federation certificate whitelist will not allow IDNA domain names. """ - config = { + config: JsonDict = { "federation_certificate_verification_whitelist": [ "example.com", "*.ドメイン.テスト", @@ -163,11 +170,11 @@ class TLSConfigTests(TestCase): ) self.assertIn("IDNA domain names", str(e)) - def test_whitelist_idna_result(self): + def test_whitelist_idna_result(self) -> None: """ The federation certificate whitelist will match on IDNA encoded names. 
""" - config = { + config: JsonDict = { "federation_certificate_verification_whitelist": [ "example.com", "*.xn--eckwd4c7c.xn--zckzah", @@ -176,14 +183,16 @@ class TLSConfigTests(TestCase): t = TestConfig() t.tls.read_config(config, config_dir_path="", data_dir_path="") - cf = FederationPolicyForHTTPS(t) + cf = FederationPolicyForHTTPS(cast(HomeServerConfig, t)) # Not in the whitelist opts = cf.get_options(b"notexample.com") + assert isinstance(opts, SSLClientConnectionCreator) self.assertTrue(opts._verifier._verify_certs) # Caught by the wildcard opts = cf.get_options(idna.encode("テスト.ドメイン.テスト")) + assert isinstance(opts, SSLClientConnectionCreator) self.assertFalse(opts._verifier._verify_certs) @@ -191,4 +200,4 @@ def _get_ssl_context_options(ssl_context: SSL.Context) -> int: """get the options bits from an openssl context object""" # the OpenSSL.SSL.Context wrapper doesn't expose get_options, so we have to # use the low-level interface - return SSL._lib.SSL_CTX_get_options(ssl_context._context) + return SSL._lib.SSL_CTX_get_options(ssl_context._context) # type: ignore[attr-defined] diff --git a/tests/config/test_util.py b/tests/config/test_util.py index 3d4929daac..7073654832 100644 --- a/tests/config/test_util.py +++ b/tests/config/test_util.py @@ -21,7 +21,7 @@ from tests.unittest import TestCase class ValidateConfigTestCase(TestCase): """Test cases for synapse.config._util.validate_config""" - def test_bad_object_in_array(self): + def test_bad_object_in_array(self) -> None: """malformed objects within an array should be validated correctly""" # consider a structure: diff --git a/tests/config/utils.py b/tests/config/utils.py index 94c18a052b..4c0e8a064a 100644 --- a/tests/config/utils.py +++ b/tests/config/utils.py @@ -17,19 +17,20 @@ import tempfile import unittest from contextlib import redirect_stdout from io import StringIO +from typing import List from synapse.config.homeserver import HomeServerConfig class ConfigFileTestCase(unittest.TestCase): - def setUp(self): + def setUp(self) -> None: self.dir = tempfile.mkdtemp() self.config_file = os.path.join(self.dir, "homeserver.yaml") - def tearDown(self): + def tearDown(self) -> None: shutil.rmtree(self.dir) - def generate_config(self): + def generate_config(self) -> None: with redirect_stdout(StringIO()): HomeServerConfig.load_or_generate_config( "", @@ -43,7 +44,7 @@ class ConfigFileTestCase(unittest.TestCase): ], ) - def generate_config_and_remove_lines_containing(self, needle): + def generate_config_and_remove_lines_containing(self, needle: str) -> None: self.generate_config() with open(self.config_file) as f: @@ -52,7 +53,7 @@ class ConfigFileTestCase(unittest.TestCase): with open(self.config_file, "w") as f: f.write("".join(contents)) - def add_lines_to_config(self, lines): + def add_lines_to_config(self, lines: List[str]) -> None: with open(self.config_file, "a") as f: for line in lines: f.write(line + "\n") diff --git a/tests/handlers/test_appservice.py b/tests/handlers/test_appservice.py index 57bfbd7734..a7495ab21a 100644 --- a/tests/handlers/test_appservice.py +++ b/tests/handlers/test_appservice.py @@ -31,7 +31,7 @@ from synapse.appservice import ( from synapse.handlers.appservice import ApplicationServicesHandler from synapse.rest.client import login, receipts, register, room, sendtodevice from synapse.server import HomeServer -from synapse.types import RoomStreamToken +from synapse.types import JsonDict, RoomStreamToken from synapse.util import Clock from synapse.util.stringutils import random_string @@ -44,7 +44,7 @@ 
from tests.utils import MockClock class AppServiceHandlerTestCase(unittest.TestCase): """Tests the ApplicationServicesHandler.""" - def setUp(self): + def setUp(self) -> None: self.mock_store = Mock() self.mock_as_api = Mock() self.mock_scheduler = Mock() @@ -61,7 +61,7 @@ class AppServiceHandlerTestCase(unittest.TestCase): self.handler = ApplicationServicesHandler(hs) self.event_source = hs.get_event_sources() - def test_notify_interested_services(self): + def test_notify_interested_services(self) -> None: interested_service = self._mkservice(is_interested_in_event=True) services = [ self._mkservice(is_interested_in_event=False), @@ -90,7 +90,7 @@ class AppServiceHandlerTestCase(unittest.TestCase): interested_service, events=[event] ) - def test_query_user_exists_unknown_user(self): + def test_query_user_exists_unknown_user(self) -> None: user_id = "@someone:anywhere" services = [self._mkservice(is_interested_in_event=True)] services[0].is_interested_in_user.return_value = True @@ -107,7 +107,7 @@ class AppServiceHandlerTestCase(unittest.TestCase): self.mock_as_api.query_user.assert_called_once_with(services[0], user_id) - def test_query_user_exists_known_user(self): + def test_query_user_exists_known_user(self) -> None: user_id = "@someone:anywhere" services = [self._mkservice(is_interested_in_event=True)] services[0].is_interested_in_user.return_value = True @@ -127,7 +127,7 @@ class AppServiceHandlerTestCase(unittest.TestCase): "query_user called when it shouldn't have been.", ) - def test_query_room_alias_exists(self): + def test_query_room_alias_exists(self) -> None: room_alias_str = "#foo:bar" room_alias = Mock() room_alias.to_string.return_value = room_alias_str @@ -157,7 +157,7 @@ class AppServiceHandlerTestCase(unittest.TestCase): self.assertEqual(result.room_id, room_id) self.assertEqual(result.servers, servers) - def test_get_3pe_protocols_no_appservices(self): + def test_get_3pe_protocols_no_appservices(self) -> None: self.mock_store.get_app_services.return_value = [] response = self.successResultOf( defer.ensureDeferred(self.handler.get_3pe_protocols("my-protocol")) @@ -165,7 +165,7 @@ class AppServiceHandlerTestCase(unittest.TestCase): self.mock_as_api.get_3pe_protocol.assert_not_called() self.assertEqual(response, {}) - def test_get_3pe_protocols_no_protocols(self): + def test_get_3pe_protocols_no_protocols(self) -> None: service = self._mkservice(False, []) self.mock_store.get_app_services.return_value = [service] response = self.successResultOf( @@ -174,7 +174,7 @@ class AppServiceHandlerTestCase(unittest.TestCase): self.mock_as_api.get_3pe_protocol.assert_not_called() self.assertEqual(response, {}) - def test_get_3pe_protocols_protocol_no_response(self): + def test_get_3pe_protocols_protocol_no_response(self) -> None: service = self._mkservice(False, ["my-protocol"]) self.mock_store.get_app_services.return_value = [service] self.mock_as_api.get_3pe_protocol.return_value = make_awaitable(None) @@ -186,7 +186,7 @@ class AppServiceHandlerTestCase(unittest.TestCase): ) self.assertEqual(response, {}) - def test_get_3pe_protocols_select_one_protocol(self): + def test_get_3pe_protocols_select_one_protocol(self) -> None: service = self._mkservice(False, ["my-protocol"]) self.mock_store.get_app_services.return_value = [service] self.mock_as_api.get_3pe_protocol.return_value = make_awaitable( @@ -202,7 +202,7 @@ class AppServiceHandlerTestCase(unittest.TestCase): response, {"my-protocol": {"x-protocol-data": 42, "instances": []}} ) - def test_get_3pe_protocols_one_protocol(self): 
+ def test_get_3pe_protocols_one_protocol(self) -> None: service = self._mkservice(False, ["my-protocol"]) self.mock_store.get_app_services.return_value = [service] self.mock_as_api.get_3pe_protocol.return_value = make_awaitable( @@ -218,7 +218,7 @@ class AppServiceHandlerTestCase(unittest.TestCase): response, {"my-protocol": {"x-protocol-data": 42, "instances": []}} ) - def test_get_3pe_protocols_multiple_protocol(self): + def test_get_3pe_protocols_multiple_protocol(self) -> None: service_one = self._mkservice(False, ["my-protocol"]) service_two = self._mkservice(False, ["other-protocol"]) self.mock_store.get_app_services.return_value = [service_one, service_two] @@ -237,11 +237,13 @@ class AppServiceHandlerTestCase(unittest.TestCase): }, ) - def test_get_3pe_protocols_multiple_info(self): + def test_get_3pe_protocols_multiple_info(self) -> None: service_one = self._mkservice(False, ["my-protocol"]) service_two = self._mkservice(False, ["my-protocol"]) - async def get_3pe_protocol(service, unusedProtocol): + async def get_3pe_protocol( + service: ApplicationService, protocol: str + ) -> Optional[JsonDict]: if service == service_one: return { "x-protocol-data": 42, @@ -276,7 +278,7 @@ class AppServiceHandlerTestCase(unittest.TestCase): }, ) - def test_notify_interested_services_ephemeral(self): + def test_notify_interested_services_ephemeral(self) -> None: """ Test sending ephemeral events to the appservice handler are scheduled to be pushed out to interested appservices, and that the stream ID is @@ -306,7 +308,7 @@ class AppServiceHandlerTestCase(unittest.TestCase): 580, ) - def test_notify_interested_services_ephemeral_out_of_order(self): + def test_notify_interested_services_ephemeral_out_of_order(self) -> None: """ Test sending out of order ephemeral events to the appservice handler are ignored. @@ -390,7 +392,7 @@ class ApplicationServicesHandlerSendEventsTestCase(unittest.HomeserverTestCase): receipts.register_servlets, ] - def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer): + def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None: self.hs = hs # Mock the ApplicationServiceScheduler's _TransactionController's send method so that # we can track any outgoing ephemeral events @@ -417,7 +419,7 @@ class ApplicationServicesHandlerSendEventsTestCase(unittest.HomeserverTestCase): "exclusive_as_user", "password", self.exclusive_as_user_device_id ) - def _notify_interested_services(self): + def _notify_interested_services(self) -> None: # This is normally set in `notify_interested_services` but we need to call the # internal async version so the reactor gets pushed to completion. self.hs.get_application_service_handler().current_max += 1 @@ -443,7 +445,7 @@ class ApplicationServicesHandlerSendEventsTestCase(unittest.HomeserverTestCase): ) def test_match_interesting_room_members( self, interesting_user: str, should_notify: bool - ): + ) -> None: """ Test to make sure that a interesting user (local or remote) in the room is notified as expected when someone else in the room sends a message. @@ -512,7 +514,9 @@ class ApplicationServicesHandlerSendEventsTestCase(unittest.HomeserverTestCase): else: self.send_mock.assert_not_called() - def test_application_services_receive_events_sent_by_interesting_local_user(self): + def test_application_services_receive_events_sent_by_interesting_local_user( + self, + ) -> None: """ Test to make sure that a messages sent from a local user can be interesting and picked up by the appservice. 
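The "interesting user" checks above reduce to appservice namespace matching. A rough sketch of that matching, assuming a made-up regex namespace in the spirit of the exclusive_as_user fixture:

    import re

    # Hypothetical exclusive user namespace for an appservice.
    user_namespace = re.compile(r"@exclusive_as_user.*:test")

    def is_interested_in_user(user_id: str) -> bool:
        # An appservice is notified about events involving users whose IDs
        # match one of its registered user namespaces.
        return user_namespace.fullmatch(user_id) is not None

    assert is_interested_in_user("@exclusive_as_user:test")
    assert not is_interested_in_user("@alice:test")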
@@ -568,7 +572,7 @@ class ApplicationServicesHandlerSendEventsTestCase(unittest.HomeserverTestCase): self.assertEqual(events[0]["type"], "m.room.message") self.assertEqual(events[0]["sender"], alice) - def test_sending_read_receipt_batches_to_application_services(self): + def test_sending_read_receipt_batches_to_application_services(self) -> None: """Tests that a large batch of read receipts are sent correctly to interested application services. """ @@ -644,7 +648,7 @@ class ApplicationServicesHandlerSendEventsTestCase(unittest.HomeserverTestCase): @unittest.override_config( {"experimental_features": {"msc2409_to_device_messages_enabled": True}} ) - def test_application_services_receive_local_to_device(self): + def test_application_services_receive_local_to_device(self) -> None: """ Test that when a user sends a to-device message to another user that is an application service's user namespace, the @@ -722,7 +726,7 @@ class ApplicationServicesHandlerSendEventsTestCase(unittest.HomeserverTestCase): @unittest.override_config( {"experimental_features": {"msc2409_to_device_messages_enabled": True}} ) - def test_application_services_receive_bursts_of_to_device(self): + def test_application_services_receive_bursts_of_to_device(self) -> None: """ Test that when a user sends >100 to-device messages at once, any interested AS's will receive them in separate transactions. @@ -913,7 +917,7 @@ class ApplicationServicesHandlerDeviceListsTestCase(unittest.HomeserverTestCase) experimental_feature_enabled: bool, as_supports_txn_extensions: bool, as_should_receive_device_list_updates: bool, - ): + ) -> None: """ Tests that an application service receives notice of changed device lists for a user, when a user changes their device lists. @@ -1070,7 +1074,7 @@ class ApplicationServicesHandlerOtkCountsTestCase(unittest.HomeserverTestCase): and a room for the users to talk in. """ - async def preparation(): + async def preparation() -> None: await self._add_otks_for_device(self._sender_user, self._sender_device, 42) await self._add_fallback_key_for_device( self._sender_user, self._sender_device, used=True diff --git a/tests/handlers/test_cas.py b/tests/handlers/test_cas.py index 2b21547d0f..2733719d82 100644 --- a/tests/handlers/test_cas.py +++ b/tests/handlers/test_cas.py @@ -199,7 +199,7 @@ class CasHandlerTestCase(HomeserverTestCase): ) -def _mock_request(): +def _mock_request() -> Mock: """Returns a mock which will stand in as a SynapseRequest""" mock = Mock( spec=[ diff --git a/tests/handlers/test_directory.py b/tests/handlers/test_directory.py index 3b72c4c9d0..90aec484c4 100644 --- a/tests/handlers/test_directory.py +++ b/tests/handlers/test_directory.py @@ -20,6 +20,7 @@ from twisted.test.proto_helpers import MemoryReactor import synapse.api.errors import synapse.rest.admin from synapse.api.constants import EventTypes +from synapse.events import EventBase from synapse.rest.client import directory, login, room from synapse.server import HomeServer from synapse.types import JsonDict, RoomAlias, create_requester @@ -201,7 +202,7 @@ class TestDeleteAlias(unittest.HomeserverTestCase): self.test_user_tok = self.login("user", "pass") self.helper.join(room=self.room_id, user=self.test_user, tok=self.test_user_tok) - def _create_alias(self, user) -> None: + def _create_alias(self, user: str) -> None: # Create a new alias to this room. 
self.get_success( self.store.create_room_alias_association( @@ -324,7 +325,7 @@ class CanonicalAliasTestCase(unittest.HomeserverTestCase): ) return room_alias - def _set_canonical_alias(self, content) -> None: + def _set_canonical_alias(self, content: JsonDict) -> None: """Configure the canonical alias state on the room.""" self.helper.send_state( self.room_id, @@ -333,13 +334,15 @@ class CanonicalAliasTestCase(unittest.HomeserverTestCase): tok=self.admin_user_tok, ) - def _get_canonical_alias(self): + def _get_canonical_alias(self) -> EventBase: """Get the canonical alias state of the room.""" - return self.get_success( + result = self.get_success( self._storage_controllers.state.get_current_state_event( self.room_id, EventTypes.CanonicalAlias, "" ) ) + assert result is not None + return result def test_remove_alias(self) -> None: """Removing an alias that is the canonical alias should remove it there too.""" @@ -349,8 +352,8 @@ class CanonicalAliasTestCase(unittest.HomeserverTestCase): ) data = self._get_canonical_alias() - self.assertEqual(data["content"]["alias"], self.test_alias) - self.assertEqual(data["content"]["alt_aliases"], [self.test_alias]) + self.assertEqual(data.content["alias"], self.test_alias) + self.assertEqual(data.content["alt_aliases"], [self.test_alias]) # Finally, delete the alias. self.get_success( @@ -360,8 +363,8 @@ class CanonicalAliasTestCase(unittest.HomeserverTestCase): ) data = self._get_canonical_alias() - self.assertNotIn("alias", data["content"]) - self.assertNotIn("alt_aliases", data["content"]) + self.assertNotIn("alias", data.content) + self.assertNotIn("alt_aliases", data.content) def test_remove_other_alias(self) -> None: """Removing an alias listed as in alt_aliases should remove it there too.""" @@ -378,9 +381,9 @@ class CanonicalAliasTestCase(unittest.HomeserverTestCase): ) data = self._get_canonical_alias() - self.assertEqual(data["content"]["alias"], self.test_alias) + self.assertEqual(data.content["alias"], self.test_alias) self.assertEqual( - data["content"]["alt_aliases"], [self.test_alias, other_test_alias] + data.content["alt_aliases"], [self.test_alias, other_test_alias] ) # Delete the second alias. 
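The assertion changes above (data["content"][...] becoming data.content[...]) follow from _get_canonical_alias now returning an EventBase, which exposes the event content as an attribute rather than by subscription. A minimal sketch of the shape involved; FakeEvent is illustrative, not Synapse's class:

    from dataclasses import dataclass, field
    from typing import Any, Dict

    @dataclass
    class FakeEvent:
        # EventBase-like: content is an attribute, so callers read
        # event.content["alias"] rather than event["content"]["alias"].
        content: Dict[str, Any] = field(default_factory=dict)

    ev = FakeEvent(content={"alias": "#test:test", "alt_aliases": ["#test:test"]})
    assert ev.content["alias"] == "#test:test"
    assert ev.content["alt_aliases"] == ["#test:test"]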
@@ -391,8 +394,8 @@ class CanonicalAliasTestCase(unittest.HomeserverTestCase): ) data = self._get_canonical_alias() - self.assertEqual(data["content"]["alias"], self.test_alias) - self.assertEqual(data["content"]["alt_aliases"], [self.test_alias]) + self.assertEqual(data.content["alias"], self.test_alias) + self.assertEqual(data.content["alt_aliases"], [self.test_alias]) class TestCreateAliasACL(unittest.HomeserverTestCase): diff --git a/tests/handlers/test_e2e_room_keys.py b/tests/handlers/test_e2e_room_keys.py index 9b7e7a8e9a..6c0b30de9e 100644 --- a/tests/handlers/test_e2e_room_keys.py +++ b/tests/handlers/test_e2e_room_keys.py @@ -17,7 +17,11 @@ import copy from unittest import mock +from twisted.test.proto_helpers import MemoryReactor + from synapse.api.errors import SynapseError +from synapse.server import HomeServer +from synapse.util import Clock from tests import unittest @@ -39,14 +43,14 @@ room_keys = { class E2eRoomKeysHandlerTestCase(unittest.HomeserverTestCase): - def make_homeserver(self, reactor, clock): + def make_homeserver(self, reactor: MemoryReactor, clock: Clock) -> HomeServer: return self.setup_test_homeserver(replication_layer=mock.Mock()) - def prepare(self, reactor, clock, hs): + def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None: self.handler = hs.get_e2e_room_keys_handler() self.local_user = "@boris:" + hs.hostname - def test_get_missing_current_version_info(self): + def test_get_missing_current_version_info(self) -> None: """Check that we get a 404 if we ask for info about the current version if there is no version. """ @@ -56,7 +60,7 @@ class E2eRoomKeysHandlerTestCase(unittest.HomeserverTestCase): res = e.value.code self.assertEqual(res, 404) - def test_get_missing_version_info(self): + def test_get_missing_version_info(self) -> None: """Check that we get a 404 if we ask for info about a specific version if it doesn't exist. """ @@ -67,9 +71,9 @@ class E2eRoomKeysHandlerTestCase(unittest.HomeserverTestCase): res = e.value.code self.assertEqual(res, 404) - def test_create_version(self): + def test_create_version(self) -> None: """Check that we can create and then retrieve versions.""" - res = self.get_success( + version = self.get_success( self.handler.create_version( self.local_user, { @@ -78,7 +82,7 @@ class E2eRoomKeysHandlerTestCase(unittest.HomeserverTestCase): }, ) ) - self.assertEqual(res, "1") + self.assertEqual(version, "1") # check we can retrieve it as the current version res = self.get_success(self.handler.get_version_info(self.local_user)) @@ -110,7 +114,7 @@ class E2eRoomKeysHandlerTestCase(unittest.HomeserverTestCase): ) # upload a new one... 
- res = self.get_success( + version = self.get_success( self.handler.create_version( self.local_user, { @@ -119,7 +123,7 @@ class E2eRoomKeysHandlerTestCase(unittest.HomeserverTestCase): }, ) ) - self.assertEqual(res, "2") + self.assertEqual(version, "2") # check we can retrieve it as the current version res = self.get_success(self.handler.get_version_info(self.local_user)) @@ -134,7 +138,7 @@ class E2eRoomKeysHandlerTestCase(unittest.HomeserverTestCase): }, ) - def test_update_version(self): + def test_update_version(self) -> None: """Check that we can update versions.""" version = self.get_success( self.handler.create_version( @@ -173,7 +177,7 @@ class E2eRoomKeysHandlerTestCase(unittest.HomeserverTestCase): }, ) - def test_update_missing_version(self): + def test_update_missing_version(self) -> None: """Check that we get a 404 on updating nonexistent versions""" e = self.get_failure( self.handler.update_version( @@ -190,7 +194,7 @@ class E2eRoomKeysHandlerTestCase(unittest.HomeserverTestCase): res = e.value.code self.assertEqual(res, 404) - def test_update_omitted_version(self): + def test_update_omitted_version(self) -> None: """Check that the update succeeds if the version is missing from the body""" version = self.get_success( self.handler.create_version( @@ -227,7 +231,7 @@ class E2eRoomKeysHandlerTestCase(unittest.HomeserverTestCase): }, ) - def test_update_bad_version(self): + def test_update_bad_version(self) -> None: """Check that we get a 400 if the version in the body doesn't match""" version = self.get_success( self.handler.create_version( @@ -255,7 +259,7 @@ class E2eRoomKeysHandlerTestCase(unittest.HomeserverTestCase): res = e.value.code self.assertEqual(res, 400) - def test_delete_missing_version(self): + def test_delete_missing_version(self) -> None: """Check that we get a 404 on deleting nonexistent versions""" e = self.get_failure( self.handler.delete_version(self.local_user, "1"), SynapseError @@ -263,15 +267,15 @@ class E2eRoomKeysHandlerTestCase(unittest.HomeserverTestCase): res = e.value.code self.assertEqual(res, 404) - def test_delete_missing_current_version(self): + def test_delete_missing_current_version(self) -> None: """Check that we get a 404 on deleting nonexistent current version""" e = self.get_failure(self.handler.delete_version(self.local_user), SynapseError) res = e.value.code self.assertEqual(res, 404) - def test_delete_version(self): + def test_delete_version(self) -> None: """Check that we can create and then delete versions.""" - res = self.get_success( + version = self.get_success( self.handler.create_version( self.local_user, { @@ -280,7 +284,7 @@ class E2eRoomKeysHandlerTestCase(unittest.HomeserverTestCase): }, ) ) - self.assertEqual(res, "1") + self.assertEqual(version, "1") # check we can delete it self.get_success(self.handler.delete_version(self.local_user, "1")) @@ -292,7 +296,7 @@ class E2eRoomKeysHandlerTestCase(unittest.HomeserverTestCase): res = e.value.code self.assertEqual(res, 404) - def test_get_missing_backup(self): + def test_get_missing_backup(self) -> None: """Check that we get a 404 on querying missing backup""" e = self.get_failure( self.handler.get_room_keys(self.local_user, "bogus_version"), SynapseError @@ -300,7 +304,7 @@ class E2eRoomKeysHandlerTestCase(unittest.HomeserverTestCase): res = e.value.code self.assertEqual(res, 404) - def test_get_missing_room_keys(self): + def test_get_missing_room_keys(self) -> None: """Check we get an empty response from an empty backup""" version = self.get_success( 
self.handler.create_version( @@ -319,7 +323,7 @@ class E2eRoomKeysHandlerTestCase(unittest.HomeserverTestCase): # TODO: test the locking semantics when uploading room_keys, # although this is probably best done in sytest - def test_upload_room_keys_no_versions(self): + def test_upload_room_keys_no_versions(self) -> None: """Check that we get a 404 on uploading keys when no versions are defined""" e = self.get_failure( self.handler.upload_room_keys(self.local_user, "no_version", room_keys), @@ -328,7 +332,7 @@ class E2eRoomKeysHandlerTestCase(unittest.HomeserverTestCase): res = e.value.code self.assertEqual(res, 404) - def test_upload_room_keys_bogus_version(self): + def test_upload_room_keys_bogus_version(self) -> None: """Check that we get a 404 on uploading keys when an nonexistent version is specified """ @@ -350,7 +354,7 @@ class E2eRoomKeysHandlerTestCase(unittest.HomeserverTestCase): res = e.value.code self.assertEqual(res, 404) - def test_upload_room_keys_wrong_version(self): + def test_upload_room_keys_wrong_version(self) -> None: """Check that we get a 403 on uploading keys for an old version""" version = self.get_success( self.handler.create_version( @@ -380,7 +384,7 @@ class E2eRoomKeysHandlerTestCase(unittest.HomeserverTestCase): res = e.value.code self.assertEqual(res, 403) - def test_upload_room_keys_insert(self): + def test_upload_room_keys_insert(self) -> None: """Check that we can insert and retrieve keys for a session""" version = self.get_success( self.handler.create_version( @@ -416,7 +420,7 @@ class E2eRoomKeysHandlerTestCase(unittest.HomeserverTestCase): ) self.assertDictEqual(res, room_keys) - def test_upload_room_keys_merge(self): + def test_upload_room_keys_merge(self) -> None: """Check that we can upload a new room_key for an existing session and have it correctly merged""" version = self.get_success( @@ -449,9 +453,11 @@ class E2eRoomKeysHandlerTestCase(unittest.HomeserverTestCase): self.handler.upload_room_keys(self.local_user, version, new_room_keys) ) - res = self.get_success(self.handler.get_room_keys(self.local_user, version)) + res_keys = self.get_success( + self.handler.get_room_keys(self.local_user, version) + ) self.assertEqual( - res["rooms"]["!abc:matrix.org"]["sessions"]["c0ff33"]["session_data"], + res_keys["rooms"]["!abc:matrix.org"]["sessions"]["c0ff33"]["session_data"], "SSBBTSBBIEZJU0gK", ) @@ -465,9 +471,12 @@ class E2eRoomKeysHandlerTestCase(unittest.HomeserverTestCase): self.handler.upload_room_keys(self.local_user, version, new_room_keys) ) - res = self.get_success(self.handler.get_room_keys(self.local_user, version)) + res_keys = self.get_success( + self.handler.get_room_keys(self.local_user, version) + ) self.assertEqual( - res["rooms"]["!abc:matrix.org"]["sessions"]["c0ff33"]["session_data"], "new" + res_keys["rooms"]["!abc:matrix.org"]["sessions"]["c0ff33"]["session_data"], + "new", ) # the etag should NOT be equal now, since the key changed @@ -483,9 +492,12 @@ class E2eRoomKeysHandlerTestCase(unittest.HomeserverTestCase): self.handler.upload_room_keys(self.local_user, version, new_room_keys) ) - res = self.get_success(self.handler.get_room_keys(self.local_user, version)) + res_keys = self.get_success( + self.handler.get_room_keys(self.local_user, version) + ) self.assertEqual( - res["rooms"]["!abc:matrix.org"]["sessions"]["c0ff33"]["session_data"], "new" + res_keys["rooms"]["!abc:matrix.org"]["sessions"]["c0ff33"]["session_data"], + "new", ) # the etag should be the same since the session did not change @@ -494,7 +506,7 @@ class 
E2eRoomKeysHandlerTestCase(unittest.HomeserverTestCase): # TODO: check edge cases as well as the common variations here - def test_delete_room_keys(self): + def test_delete_room_keys(self) -> None: """Check that we can insert and delete keys for a session""" version = self.get_success( self.handler.create_version( diff --git a/tests/handlers/test_federation.py b/tests/handlers/test_federation.py index d00c69c229..cedbb9fafc 100644 --- a/tests/handlers/test_federation.py +++ b/tests/handlers/test_federation.py @@ -439,7 +439,7 @@ class FederationTestCase(unittest.FederatingHomeserverTestCase): user_id = self.register_user("kermit", "test") tok = self.login("kermit", "test") - def create_invite(): + def create_invite() -> EventBase: room_id = self.helper.create_room_as(room_creator=user_id, tok=tok) room_version = self.get_success(self.store.get_room_version(room_id)) return event_from_pdu_json( diff --git a/tests/handlers/test_federation_event.py b/tests/handlers/test_federation_event.py index e448cb1901..70ea4d15d4 100644 --- a/tests/handlers/test_federation_event.py +++ b/tests/handlers/test_federation_event.py @@ -14,6 +14,8 @@ from typing import Optional from unittest import mock +from twisted.test.proto_helpers import MemoryReactor + from synapse.api.errors import AuthError, StoreError from synapse.api.room_versions import RoomVersion from synapse.event_auth import ( @@ -26,8 +28,10 @@ from synapse.federation.transport.client import StateRequestResponse from synapse.logging.context import LoggingContext from synapse.rest import admin from synapse.rest.client import login, room +from synapse.server import HomeServer from synapse.state.v2 import _mainline_sort, _reverse_topological_power_sort from synapse.types import JsonDict +from synapse.util import Clock from tests import unittest from tests.test_utils import event_injection, make_awaitable @@ -40,7 +44,7 @@ class FederationEventHandlerTests(unittest.FederatingHomeserverTestCase): room.register_servlets, ] - def make_homeserver(self, reactor, clock): + def make_homeserver(self, reactor: MemoryReactor, clock: Clock) -> HomeServer: # mock out the federation transport client self.mock_federation_transport_client = mock.Mock( spec=["get_room_state_ids", "get_room_state", "get_event", "backfill"] @@ -165,7 +169,9 @@ class FederationEventHandlerTests(unittest.FederatingHomeserverTestCase): ) else: - async def get_event(destination: str, event_id: str, timeout=None): + async def get_event( + destination: str, event_id: str, timeout: Optional[int] = None + ) -> JsonDict: self.assertEqual(destination, self.OTHER_SERVER_NAME) self.assertEqual(event_id, prev_event.event_id) return {"pdus": [prev_event.get_pdu_json()]} diff --git a/tests/handlers/test_message.py b/tests/handlers/test_message.py index 99384837d0..c4727ab917 100644 --- a/tests/handlers/test_message.py +++ b/tests/handlers/test_message.py @@ -14,12 +14,16 @@ import logging from typing import Tuple +from twisted.test.proto_helpers import MemoryReactor + from synapse.api.constants import EventTypes from synapse.events import EventBase from synapse.events.snapshot import EventContext from synapse.rest import admin from synapse.rest.client import login, room +from synapse.server import HomeServer from synapse.types import create_requester +from synapse.util import Clock from synapse.util.stringutils import random_string from tests import unittest @@ -35,7 +39,7 @@ class EventCreationTestCase(unittest.HomeserverTestCase): room.register_servlets, ] - def prepare(self, reactor, clock, 
hs): + def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None: self.handler = self.hs.get_event_creation_handler() self._persist_event_storage_controller = ( self.hs.get_storage_controllers().persistence @@ -94,7 +98,7 @@ class EventCreationTestCase(unittest.HomeserverTestCase): ) ) - def test_duplicated_txn_id(self): + def test_duplicated_txn_id(self) -> None: """Test that attempting to handle/persist an event with a transaction ID that has already been persisted correctly returns the old event and does *not* produce duplicate messages. @@ -161,7 +165,7 @@ class EventCreationTestCase(unittest.HomeserverTestCase): # rather than the new one. self.assertEqual(ret_event1.event_id, ret_event4.event_id) - def test_duplicated_txn_id_one_call(self): + def test_duplicated_txn_id_one_call(self) -> None: """Test that we correctly handle duplicates that we try and persist at the same time. """ @@ -185,7 +189,9 @@ class EventCreationTestCase(unittest.HomeserverTestCase): self.assertEqual(len(events), 2) self.assertEqual(events[0].event_id, events[1].event_id) - def test_when_empty_prev_events_allowed_create_event_with_empty_prev_events(self): + def test_when_empty_prev_events_allowed_create_event_with_empty_prev_events( + self, + ) -> None: """When we set allow_no_prev_events=True, should be able to create a event without any prev_events (only auth_events). """ @@ -214,7 +220,7 @@ class EventCreationTestCase(unittest.HomeserverTestCase): def test_when_empty_prev_events_not_allowed_reject_event_with_empty_prev_events( self, - ): + ) -> None: """When we set allow_no_prev_events=False, shouldn't be able to create a event without any prev_events even if it has auth_events. Expect an exception to be raised. @@ -245,7 +251,7 @@ class EventCreationTestCase(unittest.HomeserverTestCase): def test_when_empty_prev_events_allowed_reject_event_with_empty_prev_events_and_auth_events( self, - ): + ) -> None: """When we set allow_no_prev_events=True, should be able to create a event without any prev_events or auth_events. Expect an exception to be raised. 
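These tests turn on the distinction between an event's prev_events (the forward extremities it follows) and its auth_events (the state events that authorise it). A sketch with illustrative event IDs:

    # allow_no_prev_events=True permits an empty prev_events list, but the
    # event must still cite auth_events; omitting both is expected to raise.
    event = {
        "type": "m.room.message",
        "prev_events": [],                                # empty only if allowed
        "auth_events": ["$create", "$power", "$member"],  # illustrative IDs
    }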
@@ -277,12 +283,12 @@ class ServerAclValidationTestCase(unittest.HomeserverTestCase): room.register_servlets, ] - def prepare(self, reactor, clock, hs): + def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None: self.user_id = self.register_user("tester", "foobar") self.access_token = self.login("tester", "foobar") self.room_id = self.helper.create_room_as(self.user_id, tok=self.access_token) - def test_allow_server_acl(self): + def test_allow_server_acl(self) -> None: """Test that sending an ACL that blocks everyone but ourselves works.""" self.helper.send_state( @@ -293,7 +299,7 @@ class ServerAclValidationTestCase(unittest.HomeserverTestCase): expect_code=200, ) - def test_deny_server_acl_block_outselves(self): + def test_deny_server_acl_block_outselves(self) -> None: """Test that sending an ACL that blocks ourselves does not work.""" self.helper.send_state( self.room_id, @@ -303,7 +309,7 @@ class ServerAclValidationTestCase(unittest.HomeserverTestCase): expect_code=400, ) - def test_deny_redact_server_acl(self): + def test_deny_redact_server_acl(self) -> None: """Test that attempting to redact an ACL is blocked.""" body = self.helper.send_state( diff --git a/tests/handlers/test_oidc.py b/tests/handlers/test_oidc.py index 5955410524..49a1842b5c 100644 --- a/tests/handlers/test_oidc.py +++ b/tests/handlers/test_oidc.py @@ -12,7 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. import os -from typing import Any, Dict, Tuple +from typing import Any, Awaitable, ContextManager, Dict, Optional, Tuple from unittest.mock import ANY, Mock, patch from urllib.parse import parse_qs, urlparse @@ -23,7 +23,7 @@ from twisted.test.proto_helpers import MemoryReactor from synapse.handlers.sso import MappingException from synapse.http.site import SynapseRequest from synapse.server import HomeServer -from synapse.types import UserID +from synapse.types import JsonDict, UserID from synapse.util import Clock from synapse.util.macaroons import get_value_from_macaroon from synapse.util.stringutils import random_string @@ -34,6 +34,10 @@ from tests.unittest import HomeserverTestCase, override_config try: import authlib # noqa: F401 + from authlib.oidc.core import UserInfo + from authlib.oidc.discovery import OpenIDProviderMetadata + + from synapse.handlers.oidc import Token, UserAttributeDict HAS_OIDC = True except ImportError: @@ -70,29 +74,37 @@ EXPLICIT_ENDPOINT_CONFIG = { class TestMappingProvider: @staticmethod - def parse_config(config): - return + def parse_config(config: JsonDict) -> None: + return None - def __init__(self, config): + def __init__(self, config: None): pass - def get_remote_user_id(self, userinfo): + def get_remote_user_id(self, userinfo: "UserInfo") -> str: return userinfo["sub"] - async def map_user_attributes(self, userinfo, token): - return {"localpart": userinfo["username"], "display_name": None} + async def map_user_attributes( + self, userinfo: "UserInfo", token: "Token" + ) -> "UserAttributeDict": + # This is testing not providing the full map. + return {"localpart": userinfo["username"], "display_name": None} # type: ignore[typeddict-item] # Do not include get_extra_attributes to test backwards compatibility paths. 
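    # Illustrative aside on the contract being typed here, with made-up claim
    # values: get_remote_user_id picks the stable subject claim out of the
    # OIDC userinfo, and map_user_attributes turns claims into Matrix
    # attributes.
    #
    #     userinfo = {"sub": "abc123", "username": "alice", "phone": "+15550100"}
    #     remote_user_id = userinfo["sub"]  # -> "abc123"
    #     attributes = {"localpart": userinfo["username"], "display_name": None}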
class TestMappingProviderExtra(TestMappingProvider): - async def get_extra_attributes(self, userinfo, token): + async def get_extra_attributes( + self, userinfo: "UserInfo", token: "Token" + ) -> JsonDict: return {"phone": userinfo["phone"]} class TestMappingProviderFailures(TestMappingProvider): - async def map_user_attributes(self, userinfo, token, failures): - return { + # Superclass is testing the legacy interface for map_user_attributes. + async def map_user_attributes( # type: ignore[override] + self, userinfo: "UserInfo", token: "Token", failures: int + ) -> "UserAttributeDict": + return { # type: ignore[typeddict-item] "localpart": userinfo["username"] + (str(failures) if failures else ""), "display_name": None, } @@ -161,13 +173,13 @@ class OidcHandlerTestCase(HomeserverTestCase): self.hs_patcher.stop() return super().tearDown() - def reset_mocks(self): + def reset_mocks(self) -> None: """Reset all the Mocks.""" self.fake_server.reset_mocks() self.render_error.reset_mock() self.complete_sso_login.reset_mock() - def metadata_edit(self, values): + def metadata_edit(self, values: dict) -> ContextManager[Mock]: """Modify the result that will be returned by the well-known query""" metadata = self.fake_server.get_metadata() @@ -196,7 +208,9 @@ class OidcHandlerTestCase(HomeserverTestCase): session = self._generate_oidc_session_token(state, nonce, client_redirect_url) return _build_callback_request(code, state, session), grant - def assertRenderedError(self, error, error_description=None): + def assertRenderedError( + self, error: str, error_description: Optional[str] = None + ) -> Tuple[Any, ...]: self.render_error.assert_called_once() args = self.render_error.call_args[0] self.assertEqual(args[1], error) @@ -273,8 +287,8 @@ class OidcHandlerTestCase(HomeserverTestCase): """Provider metadatas are extensively validated.""" h = self.provider - def force_load_metadata(): - async def force_load(): + def force_load_metadata() -> Awaitable[None]: + async def force_load() -> "OpenIDProviderMetadata": return await h.load_metadata(force=True) return get_awaitable_result(force_load()) @@ -1198,7 +1212,7 @@ def _build_callback_request( state: str, session: str, ip_address: str = "10.0.0.1", -): +) -> Mock: """Builds a fake SynapseRequest to mock the browser callback Returns a Mock object which looks like the SynapseRequest we get from a browser diff --git a/tests/handlers/test_password_providers.py b/tests/handlers/test_password_providers.py index 75934b1707..0916de64f5 100644 --- a/tests/handlers/test_password_providers.py +++ b/tests/handlers/test_password_providers.py @@ -15,12 +15,13 @@ """Tests for the password_auth_provider interface""" from http import HTTPStatus -from typing import Any, Type, Union +from typing import Any, Dict, List, Optional, Type, Union from unittest.mock import Mock import synapse from synapse.api.constants import LoginType from synapse.api.errors import Codes +from synapse.handlers.account import AccountHandler from synapse.module_api import ModuleApi from synapse.rest.client import account, devices, login, logout, register from synapse.types import JsonDict, UserID @@ -44,13 +45,13 @@ class LegacyPasswordOnlyAuthProvider: """A legacy password_provider which only implements `check_password`.""" @staticmethod - def parse_config(self): + def parse_config(config: JsonDict) -> None: pass - def __init__(self, config, account_handler): + def __init__(self, config: None, account_handler: AccountHandler): pass - def check_password(self, *args): + def check_password(self, 
*args: str) -> Mock: return mock_password_provider.check_password(*args) @@ -58,16 +59,16 @@ class LegacyCustomAuthProvider: """A legacy password_provider which implements a custom login type.""" @staticmethod - def parse_config(self): + def parse_config(config: JsonDict) -> None: pass - def __init__(self, config, account_handler): + def __init__(self, config: None, account_handler: AccountHandler): pass - def get_supported_login_types(self): + def get_supported_login_types(self) -> Dict[str, List[str]]: return {"test.login_type": ["test_field"]} - def check_auth(self, *args): + def check_auth(self, *args: str) -> Mock: return mock_password_provider.check_auth(*args) @@ -75,15 +76,15 @@ class CustomAuthProvider: """A module which registers password_auth_provider callbacks for a custom login type.""" @staticmethod - def parse_config(self): + def parse_config(config: JsonDict) -> None: pass - def __init__(self, config, api: ModuleApi): + def __init__(self, config: None, api: ModuleApi): api.register_password_auth_provider_callbacks( auth_checkers={("test.login_type", ("test_field",)): self.check_auth} ) - def check_auth(self, *args): + def check_auth(self, *args: Any) -> Mock: return mock_password_provider.check_auth(*args) @@ -92,16 +93,16 @@ class LegacyPasswordCustomAuthProvider: as a custom type.""" @staticmethod - def parse_config(self): + def parse_config(config: JsonDict) -> None: pass - def __init__(self, config, account_handler): + def __init__(self, config: None, account_handler: AccountHandler): pass - def get_supported_login_types(self): + def get_supported_login_types(self) -> Dict[str, List[str]]: return {"m.login.password": ["password"], "test.login_type": ["test_field"]} - def check_auth(self, *args): + def check_auth(self, *args: str) -> Mock: return mock_password_provider.check_auth(*args) @@ -110,10 +111,10 @@ class PasswordCustomAuthProvider: as well as a password login""" @staticmethod - def parse_config(self): + def parse_config(config: JsonDict) -> None: pass - def __init__(self, config, api: ModuleApi): + def __init__(self, config: None, api: ModuleApi): api.register_password_auth_provider_callbacks( auth_checkers={ ("test.login_type", ("test_field",)): self.check_auth, @@ -121,10 +122,10 @@ class PasswordCustomAuthProvider: } ) - def check_auth(self, *args): + def check_auth(self, *args: Any) -> Mock: return mock_password_provider.check_auth(*args) - def check_pass(self, *args): + def check_pass(self, *args: str) -> Mock: return mock_password_provider.check_password(*args) @@ -161,16 +162,16 @@ class PasswordAuthProviderTests(unittest.HomeserverTestCase): CALLBACK_USERNAME = "get_username_for_registration" CALLBACK_DISPLAYNAME = "get_displayname_for_registration" - def setUp(self): + def setUp(self) -> None: # we use a global mock device, so make sure we are starting with a clean slate mock_password_provider.reset_mock() super().setUp() @override_config(legacy_providers_config(LegacyPasswordOnlyAuthProvider)) - def test_password_only_auth_progiver_login_legacy(self): + def test_password_only_auth_progiver_login_legacy(self) -> None: self.password_only_auth_provider_login_test_body() - def password_only_auth_provider_login_test_body(self): + def password_only_auth_provider_login_test_body(self) -> None: # login flows should only have m.login.password flows = self._get_login_flows() self.assertEqual(flows, [{"type": "m.login.password"}] + ADDITIONAL_LOGIN_FLOWS) @@ -201,10 +202,10 @@ class PasswordAuthProviderTests(unittest.HomeserverTestCase): ) 
@override_config(legacy_providers_config(LegacyPasswordOnlyAuthProvider)) - def test_password_only_auth_provider_ui_auth_legacy(self): + def test_password_only_auth_provider_ui_auth_legacy(self) -> None: self.password_only_auth_provider_ui_auth_test_body() - def password_only_auth_provider_ui_auth_test_body(self): + def password_only_auth_provider_ui_auth_test_body(self) -> None: """UI Auth should delegate correctly to the password provider""" # create the user, otherwise access doesn't work @@ -238,10 +239,10 @@ class PasswordAuthProviderTests(unittest.HomeserverTestCase): mock_password_provider.check_password.assert_called_once_with("@u:test", "p") @override_config(legacy_providers_config(LegacyPasswordOnlyAuthProvider)) - def test_local_user_fallback_login_legacy(self): + def test_local_user_fallback_login_legacy(self) -> None: self.local_user_fallback_login_test_body() - def local_user_fallback_login_test_body(self): + def local_user_fallback_login_test_body(self) -> None: """rejected login should fall back to local db""" self.register_user("localuser", "localpass") @@ -255,10 +256,10 @@ class PasswordAuthProviderTests(unittest.HomeserverTestCase): self.assertEqual("@localuser:test", channel.json_body["user_id"]) @override_config(legacy_providers_config(LegacyPasswordOnlyAuthProvider)) - def test_local_user_fallback_ui_auth_legacy(self): + def test_local_user_fallback_ui_auth_legacy(self) -> None: self.local_user_fallback_ui_auth_test_body() - def local_user_fallback_ui_auth_test_body(self): + def local_user_fallback_ui_auth_test_body(self) -> None: """rejected login should fall back to local db""" self.register_user("localuser", "localpass") @@ -298,10 +299,10 @@ class PasswordAuthProviderTests(unittest.HomeserverTestCase): "password_config": {"localdb_enabled": False}, } ) - def test_no_local_user_fallback_login_legacy(self): + def test_no_local_user_fallback_login_legacy(self) -> None: self.no_local_user_fallback_login_test_body() - def no_local_user_fallback_login_test_body(self): + def no_local_user_fallback_login_test_body(self) -> None: """localdb_enabled can block login with the local password""" self.register_user("localuser", "localpass") @@ -320,10 +321,10 @@ class PasswordAuthProviderTests(unittest.HomeserverTestCase): "password_config": {"localdb_enabled": False}, } ) - def test_no_local_user_fallback_ui_auth_legacy(self): + def test_no_local_user_fallback_ui_auth_legacy(self) -> None: self.no_local_user_fallback_ui_auth_test_body() - def no_local_user_fallback_ui_auth_test_body(self): + def no_local_user_fallback_ui_auth_test_body(self) -> None: """localdb_enabled can block ui auth with the local password""" self.register_user("localuser", "localpass") @@ -361,10 +362,10 @@ class PasswordAuthProviderTests(unittest.HomeserverTestCase): "password_config": {"enabled": False}, } ) - def test_password_auth_disabled_legacy(self): + def test_password_auth_disabled_legacy(self) -> None: self.password_auth_disabled_test_body() - def password_auth_disabled_test_body(self): + def password_auth_disabled_test_body(self) -> None: """password auth doesn't work if it's disabled across the board""" # login flows should be empty flows = self._get_login_flows() @@ -376,14 +377,14 @@ class PasswordAuthProviderTests(unittest.HomeserverTestCase): mock_password_provider.check_password.assert_not_called() @override_config(legacy_providers_config(LegacyCustomAuthProvider)) - def test_custom_auth_provider_login_legacy(self): + def test_custom_auth_provider_login_legacy(self) -> None: 
self.custom_auth_provider_login_test_body() @override_config(providers_config(CustomAuthProvider)) - def test_custom_auth_provider_login(self): + def test_custom_auth_provider_login(self) -> None: self.custom_auth_provider_login_test_body() - def custom_auth_provider_login_test_body(self): + def custom_auth_provider_login_test_body(self) -> None: # login flows should have the custom flow and m.login.password, since we # haven't disabled local password lookup. # (password must come first, because reasons) @@ -424,14 +425,14 @@ class PasswordAuthProviderTests(unittest.HomeserverTestCase): ) @override_config(legacy_providers_config(LegacyCustomAuthProvider)) - def test_custom_auth_provider_ui_auth_legacy(self): + def test_custom_auth_provider_ui_auth_legacy(self) -> None: self.custom_auth_provider_ui_auth_test_body() @override_config(providers_config(CustomAuthProvider)) - def test_custom_auth_provider_ui_auth(self): + def test_custom_auth_provider_ui_auth(self) -> None: self.custom_auth_provider_ui_auth_test_body() - def custom_auth_provider_ui_auth_test_body(self): + def custom_auth_provider_ui_auth_test_body(self) -> None: # register the user and log in twice, to get two devices self.register_user("localuser", "localpass") tok1 = self.login("localuser", "localpass") @@ -486,14 +487,14 @@ class PasswordAuthProviderTests(unittest.HomeserverTestCase): ) @override_config(legacy_providers_config(LegacyCustomAuthProvider)) - def test_custom_auth_provider_callback_legacy(self): + def test_custom_auth_provider_callback_legacy(self) -> None: self.custom_auth_provider_callback_test_body() @override_config(providers_config(CustomAuthProvider)) - def test_custom_auth_provider_callback(self): + def test_custom_auth_provider_callback(self) -> None: self.custom_auth_provider_callback_test_body() - def custom_auth_provider_callback_test_body(self): + def custom_auth_provider_callback_test_body(self) -> None: callback = Mock(return_value=make_awaitable(None)) mock_password_provider.check_auth.return_value = make_awaitable( @@ -521,16 +522,16 @@ class PasswordAuthProviderTests(unittest.HomeserverTestCase): "password_config": {"enabled": False}, } ) - def test_custom_auth_password_disabled_legacy(self): + def test_custom_auth_password_disabled_legacy(self) -> None: self.custom_auth_password_disabled_test_body() @override_config( {**providers_config(CustomAuthProvider), "password_config": {"enabled": False}} ) - def test_custom_auth_password_disabled(self): + def test_custom_auth_password_disabled(self) -> None: self.custom_auth_password_disabled_test_body() - def custom_auth_password_disabled_test_body(self): + def custom_auth_password_disabled_test_body(self) -> None: """Test login with a custom auth provider where password login is disabled""" self.register_user("localuser", "localpass") @@ -548,7 +549,7 @@ class PasswordAuthProviderTests(unittest.HomeserverTestCase): "password_config": {"enabled": False, "localdb_enabled": False}, } ) - def test_custom_auth_password_disabled_localdb_enabled_legacy(self): + def test_custom_auth_password_disabled_localdb_enabled_legacy(self) -> None: self.custom_auth_password_disabled_localdb_enabled_test_body() @override_config( @@ -557,10 +558,10 @@ class PasswordAuthProviderTests(unittest.HomeserverTestCase): "password_config": {"enabled": False, "localdb_enabled": False}, } ) - def test_custom_auth_password_disabled_localdb_enabled(self): + def test_custom_auth_password_disabled_localdb_enabled(self) -> None: 
self.custom_auth_password_disabled_localdb_enabled_test_body() - def custom_auth_password_disabled_localdb_enabled_test_body(self): + def custom_auth_password_disabled_localdb_enabled_test_body(self) -> None: """Check the localdb_enabled == enabled == False Regression test for https://github.com/matrix-org/synapse/issues/8914: check @@ -583,7 +584,7 @@ class PasswordAuthProviderTests(unittest.HomeserverTestCase): "password_config": {"enabled": False}, } ) - def test_password_custom_auth_password_disabled_login_legacy(self): + def test_password_custom_auth_password_disabled_login_legacy(self) -> None: self.password_custom_auth_password_disabled_login_test_body() @override_config( @@ -592,10 +593,10 @@ class PasswordAuthProviderTests(unittest.HomeserverTestCase): "password_config": {"enabled": False}, } ) - def test_password_custom_auth_password_disabled_login(self): + def test_password_custom_auth_password_disabled_login(self) -> None: self.password_custom_auth_password_disabled_login_test_body() - def password_custom_auth_password_disabled_login_test_body(self): + def password_custom_auth_password_disabled_login_test_body(self) -> None: """log in with a custom auth provider which implements password, but password login is disabled""" self.register_user("localuser", "localpass") @@ -615,7 +616,7 @@ class PasswordAuthProviderTests(unittest.HomeserverTestCase): "password_config": {"enabled": False}, } ) - def test_password_custom_auth_password_disabled_ui_auth_legacy(self): + def test_password_custom_auth_password_disabled_ui_auth_legacy(self) -> None: self.password_custom_auth_password_disabled_ui_auth_test_body() @override_config( @@ -624,10 +625,10 @@ class PasswordAuthProviderTests(unittest.HomeserverTestCase): "password_config": {"enabled": False}, } ) - def test_password_custom_auth_password_disabled_ui_auth(self): + def test_password_custom_auth_password_disabled_ui_auth(self) -> None: self.password_custom_auth_password_disabled_ui_auth_test_body() - def password_custom_auth_password_disabled_ui_auth_test_body(self): + def password_custom_auth_password_disabled_ui_auth_test_body(self) -> None: """UI Auth with a custom auth provider which implements password, but password login is disabled""" # register the user and log in twice via the test login type to get two devices, @@ -689,7 +690,7 @@ class PasswordAuthProviderTests(unittest.HomeserverTestCase): "password_config": {"localdb_enabled": False}, } ) - def test_custom_auth_no_local_user_fallback_legacy(self): + def test_custom_auth_no_local_user_fallback_legacy(self) -> None: self.custom_auth_no_local_user_fallback_test_body() @override_config( @@ -698,10 +699,10 @@ class PasswordAuthProviderTests(unittest.HomeserverTestCase): "password_config": {"localdb_enabled": False}, } ) - def test_custom_auth_no_local_user_fallback(self): + def test_custom_auth_no_local_user_fallback(self) -> None: self.custom_auth_no_local_user_fallback_test_body() - def custom_auth_no_local_user_fallback_test_body(self): + def custom_auth_no_local_user_fallback_test_body(self) -> None: """Test login with a custom auth provider where the local db is disabled""" self.register_user("localuser", "localpass") @@ -713,14 +714,16 @@ class PasswordAuthProviderTests(unittest.HomeserverTestCase): channel = self._send_password_login("localuser", "localpass") self.assertEqual(channel.code, HTTPStatus.BAD_REQUEST, channel.result) - def test_on_logged_out(self): + def test_on_logged_out(self) -> None: """Tests that the on_logged_out callback is called when the user logs 
out.""" self.register_user("rin", "password") tok = self.login("rin", "password") self.called = False - async def on_logged_out(user_id, device_id, access_token): + async def on_logged_out( + user_id: str, device_id: Optional[str], access_token: str + ) -> None: self.called = True on_logged_out = Mock(side_effect=on_logged_out) @@ -738,7 +741,7 @@ class PasswordAuthProviderTests(unittest.HomeserverTestCase): on_logged_out.assert_called_once() self.assertTrue(self.called) - def test_username(self): + def test_username(self) -> None: """Tests that the get_username_for_registration callback can define the username of a user when registering. """ @@ -763,7 +766,7 @@ class PasswordAuthProviderTests(unittest.HomeserverTestCase): mxid = channel.json_body["user_id"] self.assertEqual(UserID.from_string(mxid).localpart, username + "-foo") - def test_username_uia(self): + def test_username_uia(self) -> None: """Tests that the get_username_for_registration callback is only called at the end of the UIA flow. """ @@ -782,7 +785,7 @@ class PasswordAuthProviderTests(unittest.HomeserverTestCase): # Set some email configuration so the test doesn't fail because of its absence. @override_config({"email": {"notif_from": "noreply@test"}}) - def test_3pid_allowed(self): + def test_3pid_allowed(self) -> None: """Tests that an is_3pid_allowed_callbacks forbidding a 3PID makes Synapse refuse to bind the new 3PID, and that one allowing a 3PID makes Synapse accept to bind the 3PID. Also checks that the module is passed a boolean indicating whether the @@ -791,7 +794,7 @@ class PasswordAuthProviderTests(unittest.HomeserverTestCase): self._test_3pid_allowed("rin", False) self._test_3pid_allowed("kitay", True) - def test_displayname(self): + def test_displayname(self) -> None: """Tests that the get_displayname_for_registration callback can define the display name of a user when registering. """ @@ -820,7 +823,7 @@ class PasswordAuthProviderTests(unittest.HomeserverTestCase): self.assertEqual(display_name, username + "-foo") - def test_displayname_uia(self): + def test_displayname_uia(self) -> None: """Tests that the get_displayname_for_registration callback is only called at the end of the UIA flow. """ @@ -841,7 +844,7 @@ class PasswordAuthProviderTests(unittest.HomeserverTestCase): # Check that the callback has been called. m.assert_called_once() - def _test_3pid_allowed(self, username: str, registration: bool): + def _test_3pid_allowed(self, username: str, registration: bool) -> None: """Tests that the "is_3pid_allowed" module callback is called correctly, using either /register or /account URLs depending on the arguments. @@ -907,7 +910,7 @@ class PasswordAuthProviderTests(unittest.HomeserverTestCase): client is trying to register. 
""" - async def callback(uia_results, params): + async def callback(uia_results: JsonDict, params: JsonDict) -> str: self.assertIn(LoginType.DUMMY, uia_results) username = params["username"] return username + "-foo" @@ -950,12 +953,13 @@ class PasswordAuthProviderTests(unittest.HomeserverTestCase): def _send_password_login(self, user: str, password: str) -> FakeChannel: return self._send_login(type="m.login.password", user=user, password=password) - def _send_login(self, type, user, **params) -> FakeChannel: - params.update({"identifier": {"type": "m.id.user", "user": user}, "type": type}) + def _send_login(self, type: str, user: str, **extra_params: str) -> FakeChannel: + params = {"identifier": {"type": "m.id.user", "user": user}, "type": type} + params.update(extra_params) channel = self.make_request("POST", "/_matrix/client/r0/login", params) return channel - def _start_delete_device_session(self, access_token, device_id) -> str: + def _start_delete_device_session(self, access_token: str, device_id: str) -> str: """Make an initial delete device request, and return the UI Auth session ID""" channel = self._delete_device(access_token, device_id) self.assertEqual(channel.code, 401) diff --git a/tests/handlers/test_presence.py b/tests/handlers/test_presence.py index 584e7b8971..19f5322317 100644 --- a/tests/handlers/test_presence.py +++ b/tests/handlers/test_presence.py @@ -12,12 +12,14 @@ # See the License for the specific language governing permissions and # limitations under the License. -from typing import Optional +from typing import Optional, cast from unittest.mock import Mock, call from parameterized import parameterized from signedjson.key import generate_signing_key +from twisted.test.proto_helpers import MemoryReactor + from synapse.api.constants import EventTypes, Membership, PresenceState from synapse.api.presence import UserPresenceState from synapse.api.room_versions import KNOWN_ROOM_VERSIONS @@ -35,7 +37,9 @@ from synapse.handlers.presence import ( ) from synapse.rest import admin from synapse.rest.client import room -from synapse.types import UserID, get_domain_from_id +from synapse.server import HomeServer +from synapse.types import JsonDict, UserID, get_domain_from_id +from synapse.util import Clock from tests import unittest from tests.replication._base import BaseMultiWorkerStreamTestCase @@ -44,10 +48,12 @@ from tests.replication._base import BaseMultiWorkerStreamTestCase class PresenceUpdateTestCase(unittest.HomeserverTestCase): servlets = [admin.register_servlets] - def prepare(self, reactor, clock, homeserver): + def prepare( + self, reactor: MemoryReactor, clock: Clock, homeserver: HomeServer + ) -> None: self.store = homeserver.get_datastores().main - def test_offline_to_online(self): + def test_offline_to_online(self) -> None: wheel_timer = Mock() user_id = "@foo:bar" now = 5000000 @@ -85,7 +91,7 @@ class PresenceUpdateTestCase(unittest.HomeserverTestCase): any_order=True, ) - def test_online_to_online(self): + def test_online_to_online(self) -> None: wheel_timer = Mock() user_id = "@foo:bar" now = 5000000 @@ -128,7 +134,7 @@ class PresenceUpdateTestCase(unittest.HomeserverTestCase): any_order=True, ) - def test_online_to_online_last_active_noop(self): + def test_online_to_online_last_active_noop(self) -> None: wheel_timer = Mock() user_id = "@foo:bar" now = 5000000 @@ -173,7 +179,7 @@ class PresenceUpdateTestCase(unittest.HomeserverTestCase): any_order=True, ) - def test_online_to_online_last_active(self): + def test_online_to_online_last_active(self) -> None: 
wheel_timer = Mock() user_id = "@foo:bar" now = 5000000 @@ -210,7 +216,7 @@ class PresenceUpdateTestCase(unittest.HomeserverTestCase): any_order=True, ) - def test_remote_ping_timer(self): + def test_remote_ping_timer(self) -> None: wheel_timer = Mock() user_id = "@foo:bar" now = 5000000 @@ -244,7 +250,7 @@ class PresenceUpdateTestCase(unittest.HomeserverTestCase): any_order=True, ) - def test_online_to_offline(self): + def test_online_to_offline(self) -> None: wheel_timer = Mock() user_id = "@foo:bar" now = 5000000 @@ -266,7 +272,7 @@ class PresenceUpdateTestCase(unittest.HomeserverTestCase): self.assertEqual(wheel_timer.insert.call_count, 0) - def test_online_to_idle(self): + def test_online_to_idle(self) -> None: wheel_timer = Mock() user_id = "@foo:bar" now = 5000000 @@ -300,7 +306,7 @@ class PresenceUpdateTestCase(unittest.HomeserverTestCase): any_order=True, ) - def test_persisting_presence_updates(self): + def test_persisting_presence_updates(self) -> None: """Tests that the latest presence state for each user is persisted correctly""" # Create some test users and presence states for them presence_states = [] @@ -322,7 +328,7 @@ class PresenceUpdateTestCase(unittest.HomeserverTestCase): self.get_success(self.store.update_presence(presence_states)) # Check that each update is present in the database - db_presence_states = self.get_success( + db_presence_states_raw = self.get_success( self.store.get_all_presence_updates( instance_name="master", last_id=0, @@ -332,7 +338,7 @@ class PresenceUpdateTestCase(unittest.HomeserverTestCase): ) # Extract presence update user ID and state information into lists of tuples - db_presence_states = [(ps[0], ps[1]) for _, ps in db_presence_states[0]] + db_presence_states = [(ps[0], ps[1]) for _, ps in db_presence_states_raw[0]] presence_states_compare = [(ps.user_id, ps.state) for ps in presence_states] # Compare what we put into the storage with what we got out. @@ -343,7 +349,7 @@ class PresenceUpdateTestCase(unittest.HomeserverTestCase): class PresenceTimeoutTestCase(unittest.TestCase): """Tests different timers and that the timer does not change `status_msg` of user.""" - def test_idle_timer(self): + def test_idle_timer(self) -> None: user_id = "@foo:bar" status_msg = "I'm here!" now = 5000000 @@ -363,7 +369,7 @@ class PresenceTimeoutTestCase(unittest.TestCase): self.assertEqual(new_state.state, PresenceState.UNAVAILABLE) self.assertEqual(new_state.status_msg, status_msg) - def test_busy_no_idle(self): + def test_busy_no_idle(self) -> None: """ Tests that a user setting their presence to busy but idling doesn't turn their presence state into unavailable. @@ -387,7 +393,7 @@ class PresenceTimeoutTestCase(unittest.TestCase): self.assertEqual(new_state.state, PresenceState.BUSY) self.assertEqual(new_state.status_msg, status_msg) - def test_sync_timeout(self): + def test_sync_timeout(self) -> None: user_id = "@foo:bar" status_msg = "I'm here!" now = 5000000 @@ -407,7 +413,7 @@ class PresenceTimeoutTestCase(unittest.TestCase): self.assertEqual(new_state.state, PresenceState.OFFLINE) self.assertEqual(new_state.status_msg, status_msg) - def test_sync_online(self): + def test_sync_online(self) -> None: user_id = "@foo:bar" status_msg = "I'm here!" now = 5000000 @@ -429,7 +435,7 @@ class PresenceTimeoutTestCase(unittest.TestCase): self.assertEqual(new_state.state, PresenceState.ONLINE) self.assertEqual(new_state.status_msg, status_msg) - def test_federation_ping(self): + def test_federation_ping(self) -> None: user_id = "@foo:bar" status_msg = "I'm here!" 
now = 5000000 @@ -448,7 +454,7 @@ class PresenceTimeoutTestCase(unittest.TestCase): self.assertIsNotNone(new_state) self.assertEqual(state, new_state) - def test_no_timeout(self): + def test_no_timeout(self) -> None: user_id = "@foo:bar" now = 5000000 @@ -464,7 +470,7 @@ class PresenceTimeoutTestCase(unittest.TestCase): self.assertIsNone(new_state) - def test_federation_timeout(self): + def test_federation_timeout(self) -> None: user_id = "@foo:bar" status_msg = "I'm here!" now = 5000000 @@ -487,7 +493,7 @@ class PresenceTimeoutTestCase(unittest.TestCase): self.assertEqual(new_state.state, PresenceState.OFFLINE) self.assertEqual(new_state.status_msg, status_msg) - def test_last_active(self): + def test_last_active(self) -> None: user_id = "@foo:bar" status_msg = "I'm here!" now = 5000000 @@ -508,15 +514,15 @@ class PresenceTimeoutTestCase(unittest.TestCase): class PresenceHandlerTestCase(BaseMultiWorkerStreamTestCase): - def prepare(self, reactor, clock, hs): + def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None: self.presence_handler = hs.get_presence_handler() self.clock = hs.get_clock() - def test_external_process_timeout(self): + def test_external_process_timeout(self) -> None: """Test that if an external process doesn't update the records for a while we time out their syncing users' presence. """ - process_id = 1 + process_id = "1" user_id = "@test:server" # Notify handler that a user is now syncing. @@ -544,7 +550,7 @@ class PresenceHandlerTestCase(BaseMultiWorkerStreamTestCase): ) self.assertEqual(state.state, PresenceState.OFFLINE) - def test_user_goes_offline_by_timeout_status_msg_remain(self): + def test_user_goes_offline_by_timeout_status_msg_remain(self) -> None: """Test that if a user doesn't update the records for a while, the user's presence goes `OFFLINE` because of timeout and `status_msg` remains. """ @@ -576,7 +582,7 @@ class PresenceHandlerTestCase(BaseMultiWorkerStreamTestCase): self.assertEqual(state.state, PresenceState.OFFLINE) self.assertEqual(state.status_msg, status_msg) - def test_user_goes_offline_manually_with_no_status_msg(self): + def test_user_goes_offline_manually_with_no_status_msg(self) -> None: """Test that if a user changes presence manually to `OFFLINE` and no status is set, that `status_msg` is `None`. """ @@ -601,7 +607,7 @@ class PresenceHandlerTestCase(BaseMultiWorkerStreamTestCase): self.assertEqual(state.state, PresenceState.OFFLINE) self.assertEqual(state.status_msg, None) - def test_user_goes_offline_manually_with_status_msg(self): + def test_user_goes_offline_manually_with_status_msg(self) -> None: """Test that if a user changes presence manually to `OFFLINE` and a status is set, that `status_msg` appears. """ @@ -618,7 +624,7 @@ class PresenceHandlerTestCase(BaseMultiWorkerStreamTestCase): user_id, PresenceState.OFFLINE, "And now here." ) - def test_user_reset_online_with_no_status(self): + def test_user_reset_online_with_no_status(self) -> None: """Test that if a user sets the presence manually again and no status is set, that `status_msg` is `None`. """ @@ -644,7 +650,7 @@ class PresenceHandlerTestCase(BaseMultiWorkerStreamTestCase): self.assertEqual(state.state, PresenceState.ONLINE) self.assertEqual(state.status_msg, None) - def test_set_presence_with_status_msg_none(self): + def test_set_presence_with_status_msg_none(self) -> None: """Test that if a user sets the presence manually again and status is `None`, that `status_msg` is `None`.
""" @@ -659,7 +665,7 @@ class PresenceHandlerTestCase(BaseMultiWorkerStreamTestCase): # Mark user as online and `status_msg = None` self._set_presencestate_with_status_msg(user_id, PresenceState.ONLINE, None) - def test_set_presence_from_syncing_not_set(self): + def test_set_presence_from_syncing_not_set(self) -> None: """Test that presence is not set by syncing if affect_presence is false""" user_id = "@test:server" status_msg = "I'm here!" @@ -680,7 +686,7 @@ class PresenceHandlerTestCase(BaseMultiWorkerStreamTestCase): # and status message should still be the same self.assertEqual(state.status_msg, status_msg) - def test_set_presence_from_syncing_is_set(self): + def test_set_presence_from_syncing_is_set(self) -> None: """Test that presence is set by syncing if affect_presence is true""" user_id = "@test:server" status_msg = "I'm here!" @@ -699,7 +705,7 @@ class PresenceHandlerTestCase(BaseMultiWorkerStreamTestCase): # we should now be online self.assertEqual(state.state, PresenceState.ONLINE) - def test_set_presence_from_syncing_keeps_status(self): + def test_set_presence_from_syncing_keeps_status(self) -> None: """Test that presence set by syncing retains status message""" user_id = "@test:server" status_msg = "I'm here!" @@ -726,7 +732,9 @@ class PresenceHandlerTestCase(BaseMultiWorkerStreamTestCase): }, } ) - def test_set_presence_from_syncing_keeps_busy(self, test_with_workers: bool): + def test_set_presence_from_syncing_keeps_busy( + self, test_with_workers: bool + ) -> None: """Test that presence set by syncing doesn't affect busy status Args: @@ -767,7 +775,7 @@ class PresenceHandlerTestCase(BaseMultiWorkerStreamTestCase): def _set_presencestate_with_status_msg( self, user_id: str, state: str, status_msg: Optional[str] - ): + ) -> None: """Set a PresenceState and status_msg and check the result. 
Args: @@ -790,14 +798,14 @@ class PresenceHandlerTestCase(BaseMultiWorkerStreamTestCase): class PresenceFederationQueueTestCase(unittest.HomeserverTestCase): - def prepare(self, reactor, clock, hs): + def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None: self.presence_handler = hs.get_presence_handler() self.clock = hs.get_clock() self.instance_name = hs.get_instance_name() self.queue = self.presence_handler.get_federation_queue() - def test_send_and_get(self): + def test_send_and_get(self) -> None: state1 = UserPresenceState.default("@user1:test") state2 = UserPresenceState.default("@user2:test") state3 = UserPresenceState.default("@user3:test") @@ -834,7 +842,7 @@ class PresenceFederationQueueTestCase(unittest.HomeserverTestCase): self.assertFalse(limited) self.assertCountEqual(rows, []) - def test_send_and_get_split(self): + def test_send_and_get_split(self) -> None: state1 = UserPresenceState.default("@user1:test") state2 = UserPresenceState.default("@user2:test") state3 = UserPresenceState.default("@user3:test") @@ -877,7 +885,7 @@ class PresenceFederationQueueTestCase(unittest.HomeserverTestCase): self.assertCountEqual(rows, expected_rows) - def test_clear_queue_all(self): + def test_clear_queue_all(self) -> None: state1 = UserPresenceState.default("@user1:test") state2 = UserPresenceState.default("@user2:test") state3 = UserPresenceState.default("@user3:test") @@ -921,7 +929,7 @@ class PresenceFederationQueueTestCase(unittest.HomeserverTestCase): self.assertCountEqual(rows, expected_rows) - def test_partially_clear_queue(self): + def test_partially_clear_queue(self) -> None: state1 = UserPresenceState.default("@user1:test") state2 = UserPresenceState.default("@user2:test") state3 = UserPresenceState.default("@user3:test") @@ -982,7 +990,7 @@ class PresenceJoinTestCase(unittest.HomeserverTestCase): servlets = [room.register_servlets] - def make_homeserver(self, reactor, clock): + def make_homeserver(self, reactor: MemoryReactor, clock: Clock) -> HomeServer: hs = self.setup_test_homeserver( "server", federation_http_client=None, @@ -990,14 +998,14 @@ class PresenceJoinTestCase(unittest.HomeserverTestCase): ) return hs - def default_config(self): + def default_config(self) -> JsonDict: config = super().default_config() # Enable federation sending on the main process. config["federation_sender_instances"] = None return config - def prepare(self, reactor, clock, hs): - self.federation_sender = hs.get_federation_sender() + def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None: + self.federation_sender = cast(Mock, hs.get_federation_sender()) self.event_builder_factory = hs.get_event_builder_factory() self.federation_event_handler = hs.get_federation_event_handler() self.presence_handler = hs.get_presence_handler() @@ -1013,7 +1021,7 @@ class PresenceJoinTestCase(unittest.HomeserverTestCase): # random key to use. self.random_signing_key = generate_signing_key("ver") - def test_remote_joins(self): + def test_remote_joins(self) -> None: # We advance time to something that isn't 0, as we use 0 as a special # value. self.reactor.advance(1000000000000) @@ -1061,7 +1069,7 @@ class PresenceJoinTestCase(unittest.HomeserverTestCase): destinations={"server3"}, states=[expected_state] ) - def test_remote_gets_presence_when_local_user_joins(self): + def test_remote_gets_presence_when_local_user_joins(self) -> None: # We advance time to something that isn't 0, as we use 0 as a special # value. 
self.reactor.advance(1000000000000) @@ -1110,7 +1118,7 @@ class PresenceJoinTestCase(unittest.HomeserverTestCase): destinations={"server2", "server3"}, states=[expected_state] ) - def _add_new_user(self, room_id, user_id): + def _add_new_user(self, room_id: str, user_id: str) -> None: """Add new user to the room by creating an event and poking the federation API.""" hostname = get_domain_from_id(user_id) diff --git a/tests/handlers/test_profile.py b/tests/handlers/test_profile.py index 675aa023ac..7c174782da 100644 --- a/tests/handlers/test_profile.py +++ b/tests/handlers/test_profile.py @@ -332,7 +332,7 @@ class ProfileTestCase(unittest.HomeserverTestCase): @unittest.override_config( {"server_name": "test:8888", "allowed_avatar_mimetypes": ["image/png"]} ) - def test_avatar_constraint_on_local_server_with_port(self): + def test_avatar_constraint_on_local_server_with_port(self) -> None: """Test that avatar metadata is correctly fetched when the media is on a local server and the server has an explicit port. @@ -376,7 +376,7 @@ class ProfileTestCase(unittest.HomeserverTestCase): self.get_success(self.handler.check_avatar_size_and_mime_type(remote_mxc)) ) - def _setup_local_files(self, names_and_props: Dict[str, Dict[str, Any]]): + def _setup_local_files(self, names_and_props: Dict[str, Dict[str, Any]]) -> None: """Stores metadata about files in the database. Args: diff --git a/tests/handlers/test_receipts.py b/tests/handlers/test_receipts.py index b55238650c..f60400ff8d 100644 --- a/tests/handlers/test_receipts.py +++ b/tests/handlers/test_receipts.py @@ -15,14 +15,18 @@ from copy import deepcopy from typing import List +from twisted.test.proto_helpers import MemoryReactor + from synapse.api.constants import EduTypes, ReceiptTypes +from synapse.server import HomeServer from synapse.types import JsonDict +from synapse.util import Clock from tests import unittest class ReceiptsTestCase(unittest.HomeserverTestCase): - def prepare(self, reactor, clock, hs): + def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None: self.event_source = hs.get_event_sources().sources.receipt def test_filters_out_private_receipt(self) -> None: diff --git a/tests/handlers/test_register.py b/tests/handlers/test_register.py index 765df75d91..b9332d97dc 100644 --- a/tests/handlers/test_register.py +++ b/tests/handlers/test_register.py @@ -12,8 +12,11 @@ # See the License for the specific language governing permissions and # limitations under the License. +from typing import Any, Collection, List, Optional, Tuple from unittest.mock import Mock +from twisted.test.proto_helpers import MemoryReactor + from synapse.api.auth import Auth from synapse.api.constants import UserTypes from synapse.api.errors import ( @@ -22,8 +25,18 @@ from synapse.api.errors import ( ResourceLimitError, SynapseError, ) +from synapse.module_api import ModuleApi +from synapse.server import HomeServer from synapse.spam_checker_api import RegistrationBehaviour -from synapse.types import RoomAlias, RoomID, UserID, create_requester +from synapse.types import ( + JsonDict, + Requester, + RoomAlias, + RoomID, + UserID, + create_requester, +) +from synapse.util import Clock from tests.test_utils import make_awaitable from tests.unittest import override_config @@ -33,94 +46,98 @@ from .. 
import unittest class TestSpamChecker: - def __init__(self, config, api): + def __init__(self, config: None, api: ModuleApi): api.register_spam_checker_callbacks( check_registration_for_spam=self.check_registration_for_spam, ) @staticmethod - def parse_config(config): - return config + def parse_config(config: JsonDict) -> None: + return None async def check_registration_for_spam( self, - email_threepid, - username, - request_info, - auth_provider_id, - ): + email_threepid: Optional[dict], + username: Optional[str], + request_info: Collection[Tuple[str, str]], + auth_provider_id: Optional[str], + ) -> RegistrationBehaviour: pass class DenyAll(TestSpamChecker): async def check_registration_for_spam( self, - email_threepid, - username, - request_info, - auth_provider_id, - ): + email_threepid: Optional[dict], + username: Optional[str], + request_info: Collection[Tuple[str, str]], + auth_provider_id: Optional[str], + ) -> RegistrationBehaviour: return RegistrationBehaviour.DENY class BanAll(TestSpamChecker): async def check_registration_for_spam( self, - email_threepid, - username, - request_info, - auth_provider_id, - ): + email_threepid: Optional[dict], + username: Optional[str], + request_info: Collection[Tuple[str, str]], + auth_provider_id: Optional[str], + ) -> RegistrationBehaviour: return RegistrationBehaviour.SHADOW_BAN class BanBadIdPUser(TestSpamChecker): async def check_registration_for_spam( - self, email_threepid, username, request_info, auth_provider_id=None - ): + self, + email_threepid: Optional[dict], + username: Optional[str], + request_info: Collection[Tuple[str, str]], + auth_provider_id: Optional[str] = None, + ) -> RegistrationBehaviour: # Reject any user coming from CAS and whose username contains profanity - if auth_provider_id == "cas" and "flimflob" in username: + if auth_provider_id == "cas" and username and "flimflob" in username: return RegistrationBehaviour.DENY return RegistrationBehaviour.ALLOW class TestLegacyRegistrationSpamChecker: - def __init__(self, config, api): + def __init__(self, config: None, api: ModuleApi): pass async def check_registration_for_spam( self, - email_threepid, - username, - request_info, - ): + email_threepid: Optional[dict], + username: Optional[str], + request_info: Collection[Tuple[str, str]], + ) -> RegistrationBehaviour: pass class LegacyAllowAll(TestLegacyRegistrationSpamChecker): async def check_registration_for_spam( self, - email_threepid, - username, - request_info, - ): + email_threepid: Optional[dict], + username: Optional[str], + request_info: Collection[Tuple[str, str]], + ) -> RegistrationBehaviour: return RegistrationBehaviour.ALLOW class LegacyDenyAll(TestLegacyRegistrationSpamChecker): async def check_registration_for_spam( self, - email_threepid, - username, - request_info, - ): + email_threepid: Optional[dict], + username: Optional[str], + request_info: Collection[Tuple[str, str]], + ) -> RegistrationBehaviour: return RegistrationBehaviour.DENY class RegistrationTestCase(unittest.HomeserverTestCase): """Tests the RegistrationHandler.""" - def make_homeserver(self, reactor, clock): + def make_homeserver(self, reactor: MemoryReactor, clock: Clock) -> HomeServer: hs_config = self.default_config() # some of the tests rely on us having a user consent version @@ -145,7 +162,7 @@ class RegistrationTestCase(unittest.HomeserverTestCase): return hs - def prepare(self, reactor, clock, hs): + def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None: self.handler = self.hs.get_registration_handler() 
self.store = self.hs.get_datastores().main self.lots_of_users = 100 @@ -153,7 +170,7 @@ class RegistrationTestCase(unittest.HomeserverTestCase): self.requester = create_requester("@requester:test") - def test_user_is_created_and_logged_in_if_doesnt_exist(self): + def test_user_is_created_and_logged_in_if_doesnt_exist(self) -> None: frank = UserID.from_string("@frank:test") user_id = frank.to_string() requester = create_requester(user_id) @@ -164,7 +181,7 @@ class RegistrationTestCase(unittest.HomeserverTestCase): self.assertIsInstance(result_token, str) self.assertGreater(len(result_token), 20) - def test_if_user_exists(self): + def test_if_user_exists(self) -> None: store = self.hs.get_datastores().main frank = UserID.from_string("@frank:test") self.get_success( @@ -180,12 +197,12 @@ class RegistrationTestCase(unittest.HomeserverTestCase): self.assertTrue(result_token is not None) @override_config({"limit_usage_by_mau": False}) - def test_mau_limits_when_disabled(self): + def test_mau_limits_when_disabled(self) -> None: # Ensure does not throw exception self.get_success(self.get_or_create_user(self.requester, "a", "display_name")) @override_config({"limit_usage_by_mau": True}) - def test_get_or_create_user_mau_not_blocked(self): + def test_get_or_create_user_mau_not_blocked(self) -> None: self.store.count_monthly_users = Mock( return_value=make_awaitable(self.hs.config.server.max_mau_value - 1) ) @@ -193,7 +210,7 @@ class RegistrationTestCase(unittest.HomeserverTestCase): self.get_success(self.get_or_create_user(self.requester, "c", "User")) @override_config({"limit_usage_by_mau": True}) - def test_get_or_create_user_mau_blocked(self): + def test_get_or_create_user_mau_blocked(self) -> None: self.store.get_monthly_active_count = Mock( return_value=make_awaitable(self.lots_of_users) ) @@ -211,7 +228,7 @@ class RegistrationTestCase(unittest.HomeserverTestCase): ) @override_config({"limit_usage_by_mau": True}) - def test_register_mau_blocked(self): + def test_register_mau_blocked(self) -> None: self.store.get_monthly_active_count = Mock( return_value=make_awaitable(self.lots_of_users) ) @@ -229,7 +246,7 @@ class RegistrationTestCase(unittest.HomeserverTestCase): @override_config( {"auto_join_rooms": ["#room:test"], "auto_join_rooms_for_guests": False} ) - def test_auto_join_rooms_for_guests(self): + def test_auto_join_rooms_for_guests(self) -> None: user_id = self.get_success( self.handler.register_user(localpart="jeff", make_guest=True), ) @@ -237,7 +254,7 @@ class RegistrationTestCase(unittest.HomeserverTestCase): self.assertEqual(len(rooms), 0) @override_config({"auto_join_rooms": ["#room:test"]}) - def test_auto_create_auto_join_rooms(self): + def test_auto_create_auto_join_rooms(self) -> None: room_alias_str = "#room:test" user_id = self.get_success(self.handler.register_user(localpart="jeff")) rooms = self.get_success(self.store.get_rooms_for_user(user_id)) @@ -249,7 +266,7 @@ class RegistrationTestCase(unittest.HomeserverTestCase): self.assertEqual(len(rooms), 1) @override_config({"auto_join_rooms": []}) - def test_auto_create_auto_join_rooms_with_no_rooms(self): + def test_auto_create_auto_join_rooms_with_no_rooms(self) -> None: frank = UserID.from_string("@frank:test") user_id = self.get_success(self.handler.register_user(frank.localpart)) self.assertEqual(user_id, frank.to_string()) @@ -257,7 +274,7 @@ class RegistrationTestCase(unittest.HomeserverTestCase): self.assertEqual(len(rooms), 0) @override_config({"auto_join_rooms": ["#room:another"]}) - def 
test_auto_create_auto_join_where_room_is_another_domain(self): + def test_auto_create_auto_join_where_room_is_another_domain(self) -> None: frank = UserID.from_string("@frank:test") user_id = self.get_success(self.handler.register_user(frank.localpart)) self.assertEqual(user_id, frank.to_string()) @@ -267,13 +284,13 @@ class RegistrationTestCase(unittest.HomeserverTestCase): @override_config( {"auto_join_rooms": ["#room:test"], "autocreate_auto_join_rooms": False} ) - def test_auto_create_auto_join_where_auto_create_is_false(self): + def test_auto_create_auto_join_where_auto_create_is_false(self) -> None: user_id = self.get_success(self.handler.register_user(localpart="jeff")) rooms = self.get_success(self.store.get_rooms_for_user(user_id)) self.assertEqual(len(rooms), 0) @override_config({"auto_join_rooms": ["#room:test"]}) - def test_auto_create_auto_join_rooms_when_user_is_not_a_real_user(self): + def test_auto_create_auto_join_rooms_when_user_is_not_a_real_user(self) -> None: room_alias_str = "#room:test" self.store.is_real_user = Mock(return_value=make_awaitable(False)) user_id = self.get_success(self.handler.register_user(localpart="support")) @@ -284,7 +301,7 @@ class RegistrationTestCase(unittest.HomeserverTestCase): self.get_failure(directory_handler.get_association(room_alias), SynapseError) @override_config({"auto_join_rooms": ["#room:test"]}) - def test_auto_create_auto_join_rooms_when_user_is_the_first_real_user(self): + def test_auto_create_auto_join_rooms_when_user_is_the_first_real_user(self) -> None: room_alias_str = "#room:test" self.store.count_real_users = Mock(return_value=make_awaitable(1)) @@ -299,7 +316,9 @@ class RegistrationTestCase(unittest.HomeserverTestCase): self.assertEqual(len(rooms), 1) @override_config({"auto_join_rooms": ["#room:test"]}) - def test_auto_create_auto_join_rooms_when_user_is_not_the_first_real_user(self): + def test_auto_create_auto_join_rooms_when_user_is_not_the_first_real_user( + self, + ) -> None: self.store.count_real_users = Mock(return_value=make_awaitable(2)) self.store.is_real_user = Mock(return_value=make_awaitable(True)) user_id = self.get_success(self.handler.register_user(localpart="real")) @@ -312,7 +331,7 @@ class RegistrationTestCase(unittest.HomeserverTestCase): "autocreate_auto_join_rooms_federated": False, } ) - def test_auto_create_auto_join_rooms_federated(self): + def test_auto_create_auto_join_rooms_federated(self) -> None: """ Auto-created rooms that are private require an invite to go to the user (instead of directly joining it). @@ -339,7 +358,7 @@ class RegistrationTestCase(unittest.HomeserverTestCase): @override_config( {"auto_join_rooms": ["#room:test"], "auto_join_mxid_localpart": "support"} ) - def test_auto_join_mxid_localpart(self): + def test_auto_join_mxid_localpart(self) -> None: """ Ensure the user still ends up in the room created by a different user. """ @@ -376,7 +395,7 @@ class RegistrationTestCase(unittest.HomeserverTestCase): "auto_join_mxid_localpart": "support", } ) - def test_auto_create_auto_join_room_preset(self): + def test_auto_create_auto_join_room_preset(self) -> None: """ Auto-created rooms that are private require an invite to go to the user (instead of directly joining it).
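These register tests repeatedly stub async store methods with `Mock(return_value=make_awaitable(...))`. `make_awaitable` comes from `tests.test_utils`; the following is a minimal reconstruction of the idea for illustration (not necessarily the exact implementation), showing why a pre-completed future is used rather than a coroutine object:

    import asyncio
    from typing import TypeVar
    from unittest.mock import Mock

    T = TypeVar("T")


    def make_awaitable(result: T) -> "asyncio.Future[T]":
        # A completed Future can be awaited any number of times, unlike a
        # coroutine object, so a single instance works as a Mock return_value
        # even if the stubbed method is called more than once.
        future: "asyncio.Future[T]" = asyncio.get_event_loop().create_future()
        future.set_result(result)
        return future


    async def main() -> None:
        store = Mock()
        # Mirrors e.g. `self.store.is_real_user = Mock(return_value=make_awaitable(False))`.
        store.is_real_user = Mock(return_value=make_awaitable(False))
        assert await store.is_real_user("@frank:test") is False


    asyncio.run(main())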
@@ -416,7 +435,7 @@ class RegistrationTestCase(unittest.HomeserverTestCase): "auto_join_mxid_localpart": "support", } ) - def test_auto_create_auto_join_room_preset_guest(self): + def test_auto_create_auto_join_room_preset_guest(self) -> None: """ Auto-created rooms that are private require an invite to go to the user (instead of directly joining it). @@ -454,7 +473,7 @@ class RegistrationTestCase(unittest.HomeserverTestCase): "auto_join_mxid_localpart": "support", } ) - def test_auto_create_auto_join_room_preset_invalid_permissions(self): + def test_auto_create_auto_join_room_preset_invalid_permissions(self) -> None: """ Auto-created rooms that are private require an invite, check that registration doesn't completely break if the inviter doesn't have proper @@ -525,7 +544,7 @@ class RegistrationTestCase(unittest.HomeserverTestCase): "auto_join_rooms": ["#room:test"], }, ) - def test_auto_create_auto_join_where_no_consent(self): + def test_auto_create_auto_join_where_no_consent(self) -> None: """Test to ensure that the first user is not auto-joined to a room if they have not given general consent. """ @@ -550,19 +569,19 @@ class RegistrationTestCase(unittest.HomeserverTestCase): rooms = self.get_success(self.store.get_rooms_for_user(user_id)) self.assertEqual(len(rooms), 1) - def test_register_support_user(self): + def test_register_support_user(self) -> None: user_id = self.get_success( self.handler.register_user(localpart="user", user_type=UserTypes.SUPPORT) ) d = self.store.is_support_user(user_id) self.assertTrue(self.get_success(d)) - def test_register_not_support_user(self): + def test_register_not_support_user(self) -> None: user_id = self.get_success(self.handler.register_user(localpart="user")) d = self.store.is_support_user(user_id) self.assertFalse(self.get_success(d)) - def test_invalid_user_id_length(self): + def test_invalid_user_id_length(self) -> None: invalid_user_id = "x" * 256 self.get_failure( self.handler.register_user(localpart=invalid_user_id), SynapseError @@ -577,7 +596,7 @@ class RegistrationTestCase(unittest.HomeserverTestCase): ] } ) - def test_spam_checker_deny(self): + def test_spam_checker_deny(self) -> None: """A spam checker can deny registration, which results in an error.""" self.get_failure(self.handler.register_user(localpart="user"), SynapseError) @@ -590,7 +609,7 @@ class RegistrationTestCase(unittest.HomeserverTestCase): ] } ) - def test_spam_checker_legacy_allow(self): + def test_spam_checker_legacy_allow(self) -> None: """Tests that a legacy spam checker implementing the legacy 3-arg version of the check_registration_for_spam callback is correctly called. @@ -610,7 +629,7 @@ class RegistrationTestCase(unittest.HomeserverTestCase): ] } ) - def test_spam_checker_legacy_deny(self): + def test_spam_checker_legacy_deny(self) -> None: """Tests that a legacy spam checker implementing the legacy 3-arg version of the check_registration_for_spam callback is correctly called. 
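Both the modern and legacy spam-checker fixtures in this file funnel into the same `RegistrationBehaviour` decision. A compact sketch of the modern registration flow, using the callback signature shown in this diff (the username rule here is made up for the example):

    from typing import Collection, Optional, Tuple

    from synapse.module_api import ModuleApi
    from synapse.spam_checker_api import RegistrationBehaviour


    class ShadowBanSuspiciousNames:
        """Illustrative module: shadow-ban rather than reject outright."""

        def __init__(self, config: None, api: ModuleApi):
            api.register_spam_checker_callbacks(
                check_registration_for_spam=self.check_registration_for_spam,
            )

        async def check_registration_for_spam(
            self,
            email_threepid: Optional[dict],
            username: Optional[str],
            request_info: Collection[Tuple[str, str]],
            auth_provider_id: Optional[str],
        ) -> RegistrationBehaviour:
            # The "spambot" substring check is an invented rule for this sketch.
            if username and "spambot" in username:
                return RegistrationBehaviour.SHADOW_BAN
            return RegistrationBehaviour.ALLOW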
@@ -630,7 +649,7 @@ class RegistrationTestCase(unittest.HomeserverTestCase): ] } ) - def test_spam_checker_shadow_ban(self): + def test_spam_checker_shadow_ban(self) -> None: """A spam checker can choose to shadow-ban a user, which allows registration to succeed.""" user_id = self.get_success(self.handler.register_user(localpart="user")) @@ -660,7 +679,7 @@ class RegistrationTestCase(unittest.HomeserverTestCase): ] } ) - def test_spam_checker_receives_sso_type(self): + def test_spam_checker_receives_sso_type(self) -> None: """Test rejecting registration based on SSO type""" f = self.get_failure( self.handler.register_user(localpart="bobflimflob", auth_provider_id="cas"), @@ -678,8 +697,12 @@ class RegistrationTestCase(unittest.HomeserverTestCase): ) async def get_or_create_user( - self, requester, localpart, displayname, password_hash=None - ): + self, + requester: Requester, + localpart: str, + displayname: Optional[str], + password_hash: Optional[str] = None, + ) -> Tuple[str, str]: """Creates a new user if the user does not exist, else revokes all previous access tokens and generates a new one. @@ -734,13 +757,15 @@ class RegistrationTestCase(unittest.HomeserverTestCase): class RemoteAutoJoinTestCase(unittest.HomeserverTestCase): """Tests auto-join on remote rooms.""" - def make_homeserver(self, reactor, clock): + def make_homeserver(self, reactor: MemoryReactor, clock: Clock) -> HomeServer: self.room_id = "!roomid:remotetest" - async def update_membership(*args, **kwargs): + async def update_membership(*args: Any, **kwargs: Any) -> None: pass - async def lookup_room_alias(*args, **kwargs): + async def lookup_room_alias( + *args: Any, **kwargs: Any + ) -> Tuple[RoomID, List[str]]: return RoomID.from_string(self.room_id), ["remotetest"] self.room_member_handler = Mock(spec=["update_membership", "lookup_room_alias"]) @@ -750,12 +775,12 @@ class RemoteAutoJoinTestCase(unittest.HomeserverTestCase): hs = self.setup_test_homeserver(room_member_handler=self.room_member_handler) return hs - def prepare(self, reactor, clock, hs): + def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None: self.handler = self.hs.get_registration_handler() self.store = self.hs.get_datastores().main @override_config({"auto_join_rooms": ["#room:remotetest"]}) - def test_auto_create_auto_join_remote_room(self): + def test_auto_create_auto_join_remote_room(self) -> None: """Tests that we don't attempt to create remote rooms, and that we don't attempt to invite ourselves to rooms we're not in.""" diff --git a/tests/handlers/test_room.py b/tests/handlers/test_room.py index fcde5dab72..df95490d3b 100644 --- a/tests/handlers/test_room.py +++ b/tests/handlers/test_room.py @@ -14,7 +14,7 @@ class EncryptedByDefaultTestCase(unittest.HomeserverTestCase): ] @override_config({"encryption_enabled_by_default_for_room_type": "all"}) - def test_encrypted_by_default_config_option_all(self): + def test_encrypted_by_default_config_option_all(self) -> None: """Tests that invite-only and non-invite-only rooms have encryption enabled by default when the config option encryption_enabled_by_default_for_room_type is "all". 
""" @@ -45,7 +45,7 @@ class EncryptedByDefaultTestCase(unittest.HomeserverTestCase): self.assertEqual(event_content, {"algorithm": RoomEncryptionAlgorithms.DEFAULT}) @override_config({"encryption_enabled_by_default_for_room_type": "invite"}) - def test_encrypted_by_default_config_option_invite(self): + def test_encrypted_by_default_config_option_invite(self) -> None: """Tests that only new, invite-only rooms have encryption enabled by default when the config option encryption_enabled_by_default_for_room_type is "invite". """ @@ -76,7 +76,7 @@ class EncryptedByDefaultTestCase(unittest.HomeserverTestCase): ) @override_config({"encryption_enabled_by_default_for_room_type": "off"}) - def test_encrypted_by_default_config_option_off(self): + def test_encrypted_by_default_config_option_off(self) -> None: """Tests that neither new invite-only nor non-invite-only rooms have encryption enabled by default when the config option encryption_enabled_by_default_for_room_type is "off". diff --git a/tests/handlers/test_room_summary.py b/tests/handlers/test_room_summary.py index aa650756e4..d907fcaf04 100644 --- a/tests/handlers/test_room_summary.py +++ b/tests/handlers/test_room_summary.py @@ -11,10 +11,11 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. -from typing import Any, Iterable, List, Optional, Tuple +from typing import Any, Dict, Iterable, List, Optional, Set, Tuple from unittest import mock from twisted.internet.defer import ensureDeferred +from twisted.test.proto_helpers import MemoryReactor from synapse.api.constants import ( EventContentFields, @@ -34,11 +35,14 @@ from synapse.rest import admin from synapse.rest.client import login, room from synapse.server import HomeServer from synapse.types import JsonDict, UserID, create_requester +from synapse.util import Clock from tests import unittest -def _create_event(room_id: str, order: Optional[Any] = None, origin_server_ts: int = 0): +def _create_event( + room_id: str, order: Optional[Any] = None, origin_server_ts: int = 0 +) -> mock.Mock: result = mock.Mock(name=room_id) result.room_id = room_id result.content = {} @@ -48,40 +52,40 @@ def _create_event(room_id: str, order: Optional[Any] = None, origin_server_ts: i return result -def _order(*events): +def _order(*events: mock.Mock) -> List[mock.Mock]: return sorted(events, key=_child_events_comparison_key) class TestSpaceSummarySort(unittest.TestCase): - def test_no_order_last(self): + def test_no_order_last(self) -> None: """An event with no ordering is placed behind those with an ordering.""" ev1 = _create_event("!abc:test") ev2 = _create_event("!xyz:test", "xyz") self.assertEqual([ev2, ev1], _order(ev1, ev2)) - def test_order(self): + def test_order(self) -> None: """The ordering should be used.""" ev1 = _create_event("!abc:test", "xyz") ev2 = _create_event("!xyz:test", "abc") self.assertEqual([ev2, ev1], _order(ev1, ev2)) - def test_order_origin_server_ts(self): + def test_order_origin_server_ts(self) -> None: """Origin server is a tie-breaker for ordering.""" ev1 = _create_event("!abc:test", origin_server_ts=10) ev2 = _create_event("!xyz:test", origin_server_ts=30) self.assertEqual([ev1, ev2], _order(ev1, ev2)) - def test_order_room_id(self): + def test_order_room_id(self) -> None: """Room ID is a final tie-breaker for ordering.""" ev1 = _create_event("!abc:test") ev2 = _create_event("!xyz:test") self.assertEqual([ev1, ev2], _order(ev1, ev2)) - def 
test_invalid_ordering_type(self): + def test_invalid_ordering_type(self) -> None: """Invalid orderings are considered the same as missing.""" ev1 = _create_event("!abc:test", 1) ev2 = _create_event("!xyz:test", "xyz") @@ -97,7 +101,7 @@ class TestSpaceSummarySort(unittest.TestCase): ev1 = _create_event("!abc:test", True) self.assertEqual([ev2, ev1], _order(ev1, ev2)) - def test_invalid_ordering_value(self): + def test_invalid_ordering_value(self) -> None: """Invalid orderings are considered the same as missing.""" ev1 = _create_event("!abc:test", "foo\n") ev2 = _create_event("!xyz:test", "xyz") @@ -115,7 +119,7 @@ class SpaceSummaryTestCase(unittest.HomeserverTestCase): login.register_servlets, ] - def prepare(self, reactor, clock, hs: HomeServer): + def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None: self.hs = hs self.handler = self.hs.get_room_summary_handler() @@ -223,7 +227,7 @@ class SpaceSummaryTestCase(unittest.HomeserverTestCase): fed_handler.on_invite_request(fed_hostname, event, RoomVersions.V6) ) - def test_simple_space(self): + def test_simple_space(self) -> None: """Test a simple space with a single room.""" # The result should have the space and the room in it, along with a link # from space -> room. @@ -234,7 +238,7 @@ class SpaceSummaryTestCase(unittest.HomeserverTestCase): ) self._assert_hierarchy(result, expected) - def test_large_space(self): + def test_large_space(self) -> None: """Test a space with a large number of rooms.""" rooms = [self.room] # Make at least 51 rooms that are part of the space. @@ -260,7 +264,7 @@ class SpaceSummaryTestCase(unittest.HomeserverTestCase): result["rooms"] += result2["rooms"] self._assert_hierarchy(result, expected) - def test_visibility(self): + def test_visibility(self) -> None: """A user not in a space cannot inspect it.""" user2 = self.register_user("user2", "pass") token2 = self.login("user2", "pass") @@ -380,7 +384,7 @@ class SpaceSummaryTestCase(unittest.HomeserverTestCase): self._assert_hierarchy(result2, [(self.space, [self.room])]) def _create_room_with_join_rule( - self, join_rule: str, room_version: Optional[str] = None, **extra_content + self, join_rule: str, room_version: Optional[str] = None, **extra_content: Any ) -> str: """Create a room with the given join rule and add it to the space.""" room_id = self.helper.create_room_as( @@ -403,7 +407,7 @@ class SpaceSummaryTestCase(unittest.HomeserverTestCase): self._add_child(self.space, room_id, self.token) return room_id - def test_filtering(self): + def test_filtering(self) -> None: """ Rooms should be properly filtered to only include rooms the user has access to. """ @@ -476,7 +480,7 @@ class SpaceSummaryTestCase(unittest.HomeserverTestCase): ) self._assert_hierarchy(result, expected) - def test_complex_space(self): + def test_complex_space(self) -> None: """ Create a "complex" space to see how it handles things like loops and subspaces. 
""" @@ -516,7 +520,7 @@ class SpaceSummaryTestCase(unittest.HomeserverTestCase): ) self._assert_hierarchy(result, expected) - def test_pagination(self): + def test_pagination(self) -> None: """Test simple pagination works.""" room_ids = [] for i in range(1, 10): @@ -553,7 +557,7 @@ class SpaceSummaryTestCase(unittest.HomeserverTestCase): self._assert_hierarchy(result, expected) self.assertNotIn("next_batch", result) - def test_invalid_pagination_token(self): + def test_invalid_pagination_token(self) -> None: """An invalid pagination token, or changing other parameters, shoudl be rejected.""" room_ids = [] for i in range(1, 10): @@ -604,7 +608,7 @@ class SpaceSummaryTestCase(unittest.HomeserverTestCase): SynapseError, ) - def test_max_depth(self): + def test_max_depth(self) -> None: """Create a deep tree to test the max depth against.""" spaces = [self.space] rooms = [self.room] @@ -659,7 +663,7 @@ class SpaceSummaryTestCase(unittest.HomeserverTestCase): ] self._assert_hierarchy(result, expected) - def test_unknown_room_version(self): + def test_unknown_room_version(self) -> None: """ If a room with an unknown room version is encountered it should not cause the entire summary to skip. @@ -685,7 +689,7 @@ class SpaceSummaryTestCase(unittest.HomeserverTestCase): ) self._assert_hierarchy(result, expected) - def test_fed_complex(self): + def test_fed_complex(self) -> None: """ Return data over federation and ensure that it is handled properly. """ @@ -722,7 +726,9 @@ class SpaceSummaryTestCase(unittest.HomeserverTestCase): "world_readable": True, } - async def summarize_remote_room_hierarchy(_self, room, suggested_only): + async def summarize_remote_room_hierarchy( + _self: Any, room: Any, suggested_only: bool + ) -> Tuple[Optional[_RoomEntry], Dict[str, JsonDict], Set[str]]: return requested_room_entry, {subroom: child_room}, set() # Add a room to the space which is on another server. @@ -744,7 +750,7 @@ class SpaceSummaryTestCase(unittest.HomeserverTestCase): ) self._assert_hierarchy(result, expected) - def test_fed_filtering(self): + def test_fed_filtering(self) -> None: """ Rooms returned over federation should be properly filtered to only include rooms the user has access to. @@ -853,7 +859,9 @@ class SpaceSummaryTestCase(unittest.HomeserverTestCase): ], ) - async def summarize_remote_room_hierarchy(_self, room, suggested_only): + async def summarize_remote_room_hierarchy( + _self: Any, room: Any, suggested_only: bool + ) -> Tuple[Optional[_RoomEntry], Dict[str, JsonDict], Set[str]]: return subspace_room_entry, dict(children_rooms), set() # Add a room to the space which is on another server. @@ -892,7 +900,7 @@ class SpaceSummaryTestCase(unittest.HomeserverTestCase): ) self._assert_hierarchy(result, expected) - def test_fed_invited(self): + def test_fed_invited(self) -> None: """ A room which the user was invited to should be included in the response. @@ -915,7 +923,9 @@ class SpaceSummaryTestCase(unittest.HomeserverTestCase): }, ) - async def summarize_remote_room_hierarchy(_self, room, suggested_only): + async def summarize_remote_room_hierarchy( + _self: Any, room: Any, suggested_only: bool + ) -> Tuple[Optional[_RoomEntry], Dict[str, JsonDict], Set[str]]: return fed_room_entry, {}, set() # Add a room to the space which is on another server. @@ -936,7 +946,7 @@ class SpaceSummaryTestCase(unittest.HomeserverTestCase): ) self._assert_hierarchy(result, expected) - def test_fed_caching(self): + def test_fed_caching(self) -> None: """ Federation `/hierarchy` responses should be cached. 
""" @@ -1023,7 +1033,7 @@ class RoomSummaryTestCase(unittest.HomeserverTestCase): login.register_servlets, ] - def prepare(self, reactor, clock, hs: HomeServer): + def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None: self.hs = hs self.handler = self.hs.get_room_summary_handler() @@ -1040,12 +1050,12 @@ class RoomSummaryTestCase(unittest.HomeserverTestCase): tok=self.token, ) - def test_own_room(self): + def test_own_room(self) -> None: """Test a simple room created by the requester.""" result = self.get_success(self.handler.get_room_summary(self.user, self.room)) self.assertEqual(result.get("room_id"), self.room) - def test_visibility(self): + def test_visibility(self) -> None: """A user not in a private room cannot get its summary.""" user2 = self.register_user("user2", "pass") token2 = self.login("user2", "pass") @@ -1093,7 +1103,7 @@ class RoomSummaryTestCase(unittest.HomeserverTestCase): result = self.get_success(self.handler.get_room_summary(user2, self.room)) self.assertEqual(result.get("room_id"), self.room) - def test_fed(self): + def test_fed(self) -> None: """ Return data over federation and ensure that it is handled properly. """ @@ -1105,7 +1115,9 @@ class RoomSummaryTestCase(unittest.HomeserverTestCase): {"room_id": fed_room, "world_readable": True}, ) - async def summarize_remote_room_hierarchy(_self, room, suggested_only): + async def summarize_remote_room_hierarchy( + _self: Any, room: Any, suggested_only: bool + ) -> Tuple[Optional[_RoomEntry], Dict[str, JsonDict], Set[str]]: return requested_room_entry, {}, set() with mock.patch( diff --git a/tests/handlers/test_saml.py b/tests/handlers/test_saml.py index a0f84e2940..9b1b8b9f13 100644 --- a/tests/handlers/test_saml.py +++ b/tests/handlers/test_saml.py @@ -12,7 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. -from typing import Any, Dict, Optional +from typing import Any, Dict, Optional, Set, Tuple from unittest.mock import Mock import attr @@ -20,7 +20,9 @@ import attr from twisted.test.proto_helpers import MemoryReactor from synapse.api.errors import RedirectException +from synapse.module_api import ModuleApi from synapse.server import HomeServer +from synapse.types import JsonDict from synapse.util import Clock from tests.test_utils import simple_async_mock @@ -29,6 +31,7 @@ from tests.unittest import HomeserverTestCase, override_config # Check if we have the dependencies to run the tests. 
try: import saml2.config + import saml2.response from saml2.sigver import SigverError has_saml2 = True @@ -56,31 +59,39 @@ class FakeAuthnResponse: class TestMappingProvider: - def __init__(self, config, module): + def __init__(self, config: None, module: ModuleApi): pass @staticmethod - def parse_config(config): - return + def parse_config(config: JsonDict) -> None: + return None @staticmethod - def get_saml_attributes(config): + def get_saml_attributes(config: None) -> Tuple[Set[str], Set[str]]: return {"uid"}, {"displayName"} - def get_remote_user_id(self, saml_response, client_redirect_url): + def get_remote_user_id( + self, saml_response: "saml2.response.AuthnResponse", client_redirect_url: str + ) -> str: return saml_response.ava["uid"] def saml_response_to_user_attributes( - self, saml_response, failures, client_redirect_url - ): + self, + saml_response: "saml2.response.AuthnResponse", + failures: int, + client_redirect_url: str, + ) -> dict: localpart = saml_response.ava["username"] + (str(failures) if failures else "") return {"mxid_localpart": localpart, "displayname": None} class TestRedirectMappingProvider(TestMappingProvider): def saml_response_to_user_attributes( - self, saml_response, failures, client_redirect_url - ): + self, + saml_response: "saml2.response.AuthnResponse", + failures: int, + client_redirect_url: str, + ) -> dict: raise RedirectException(b"https://custom-saml-redirect/") @@ -347,7 +358,7 @@ class SamlHandlerTestCase(HomeserverTestCase): ) -def _mock_request(): +def _mock_request() -> Mock: """Returns a mock which will stand in as a SynapseRequest""" mock = Mock( spec=[ diff --git a/tests/handlers/test_send_email.py b/tests/handlers/test_send_email.py index da4bf8b582..8b6e4a40b6 100644 --- a/tests/handlers/test_send_email.py +++ b/tests/handlers/test_send_email.py @@ -13,7 +13,7 @@ # limitations under the License. 
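# Quoting the annotation, as in "saml2.response.AuthnResponse" in
# TestMappingProvider above, keeps the type out of runtime evaluation: the
# module still imports when pysaml2 is absent, while mypy resolves the string.
# An equivalent, self-contained sketch:
from typing import TYPE_CHECKING

if TYPE_CHECKING:
    import saml2.response  # resolved by the type checker only

def get_remote_user_id(saml_response: "saml2.response.AuthnResponse") -> str:
    # .ava holds the SAML assertion's attribute-value map.
    return saml_response.ava["uid"]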
-from typing import List, Tuple +from typing import Callable, List, Tuple from zope.interface import implementer @@ -28,20 +28,27 @@ from tests.unittest import HomeserverTestCase, override_config @implementer(interfaces.IMessageDelivery) class _DummyMessageDelivery: - def __init__(self): + def __init__(self) -> None: # (recipient, message) tuples self.messages: List[Tuple[smtp.Address, bytes]] = [] - def receivedHeader(self, helo, origin, recipients): + def receivedHeader( + self, + helo: Tuple[bytes, bytes], + origin: smtp.Address, + recipients: List[smtp.User], + ) -> None: return None - def validateFrom(self, helo, origin): + def validateFrom( + self, helo: Tuple[bytes, bytes], origin: smtp.Address + ) -> smtp.Address: return origin - def record_message(self, recipient: smtp.Address, message: bytes): + def record_message(self, recipient: smtp.Address, message: bytes) -> None: self.messages.append((recipient, message)) - def validateTo(self, user: smtp.User): + def validateTo(self, user: smtp.User) -> Callable[[], interfaces.IMessageSMTP]: return lambda: _DummyMessage(self, user) @@ -56,20 +63,20 @@ class _DummyMessage: self._user = user self._buffer: List[bytes] = [] - def lineReceived(self, line): + def lineReceived(self, line: bytes) -> None: self._buffer.append(line) - def eomReceived(self): + def eomReceived(self) -> "defer.Deferred[bytes]": message = b"\n".join(self._buffer) + b"\n" self._delivery.record_message(self._user.dest, message) return defer.succeed(b"saved") - def connectionLost(self): + def connectionLost(self) -> None: pass class SendEmailHandlerTestCase(HomeserverTestCase): - def test_send_email(self): + def test_send_email(self) -> None: """Happy-path test that we can send email to a non-TLS server.""" h = self.hs.get_send_email_handler() d = ensureDeferred( @@ -119,7 +126,7 @@ class SendEmailHandlerTestCase(HomeserverTestCase): }, } ) - def test_send_email_force_tls(self): + def test_send_email_force_tls(self) -> None: """Happy-path test that we can send email to an Implicit TLS server.""" h = self.hs.get_send_email_handler() d = ensureDeferred( diff --git a/tests/handlers/test_stats.py b/tests/handlers/test_stats.py index 05f9ec3c51..f1a50c5bcb 100644 --- a/tests/handlers/test_stats.py +++ b/tests/handlers/test_stats.py @@ -12,9 +12,15 @@ # See the License for the specific language governing permissions and # limitations under the License. +from typing import Any, Dict, List, Optional + +from twisted.test.proto_helpers import MemoryReactor + from synapse.rest import admin from synapse.rest.client import login, room +from synapse.server import HomeServer from synapse.storage.databases.main import stats +from synapse.util import Clock from tests import unittest @@ -32,11 +38,11 @@ class StatsRoomTests(unittest.HomeserverTestCase): login.register_servlets, ] - def prepare(self, reactor, clock, hs): + def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None: self.store = hs.get_datastores().main self.handler = self.hs.get_stats_handler() - def _add_background_updates(self): + def _add_background_updates(self) -> None: """ Add the background updates we need to run. 
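# Typed test doubles for zope interfaces, as in _DummyMessageDelivery above:
# @implementer registers the class with zope at runtime, while the explicit
# annotations are what mypy checks. A minimal sketch with an invented interface:
from typing import List
from zope.interface import Interface, implementer

class IRecorder(Interface):
    def record(item):  # zope interface methods are declared without self
        """Record one item."""

@implementer(IRecorder)
class MemoryRecorder:
    def __init__(self) -> None:
        self.items: List[bytes] = []

    def record(self, item: bytes) -> None:
        self.items.append(item)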
""" @@ -63,12 +69,14 @@ class StatsRoomTests(unittest.HomeserverTestCase): ) ) - async def get_all_room_state(self): + async def get_all_room_state(self) -> List[Dict[str, Any]]: return await self.store.db_pool.simple_select_list( "room_stats_state", None, retcols=("name", "topic", "canonical_alias") ) - def _get_current_stats(self, stats_type, stat_id): + def _get_current_stats( + self, stats_type: str, stat_id: str + ) -> Optional[Dict[str, Any]]: table, id_col = stats.TYPE_TO_TABLE[stats_type] cols = list(stats.ABSOLUTE_STATS_FIELDS[stats_type]) @@ -82,13 +90,13 @@ class StatsRoomTests(unittest.HomeserverTestCase): ) ) - def _perform_background_initial_update(self): + def _perform_background_initial_update(self) -> None: # Do the initial population of the stats via the background update self._add_background_updates() self.wait_for_background_updates() - def test_initial_room(self): + def test_initial_room(self) -> None: """ The background updates will build the table from scratch. """ @@ -125,7 +133,7 @@ class StatsRoomTests(unittest.HomeserverTestCase): self.assertEqual(len(r), 1) self.assertEqual(r[0]["topic"], "foo") - def test_create_user(self): + def test_create_user(self) -> None: """ When we create a user, it should have statistics already ready. """ @@ -134,12 +142,12 @@ class StatsRoomTests(unittest.HomeserverTestCase): u1stats = self._get_current_stats("user", u1) - self.assertIsNotNone(u1stats) + assert u1stats is not None # not in any rooms by default self.assertEqual(u1stats["joined_rooms"], 0) - def test_create_room(self): + def test_create_room(self) -> None: """ When we create a room, it should have statistics already ready. """ @@ -153,8 +161,8 @@ class StatsRoomTests(unittest.HomeserverTestCase): r2 = self.helper.create_room_as(u1, tok=u1token, is_public=False) r2stats = self._get_current_stats("room", r2) - self.assertIsNotNone(r1stats) - self.assertIsNotNone(r2stats) + assert r1stats is not None + assert r2stats is not None self.assertEqual( r1stats["current_state_events"], EXPT_NUM_STATE_EVTS_IN_FRESH_PUBLIC_ROOM @@ -171,7 +179,9 @@ class StatsRoomTests(unittest.HomeserverTestCase): self.assertEqual(r2stats["invited_members"], 0) self.assertEqual(r2stats["banned_members"], 0) - def test_updating_profile_information_does_not_increase_joined_members_count(self): + def test_updating_profile_information_does_not_increase_joined_members_count( + self, + ) -> None: """ Check that the joined_members count does not increase when a user changes their profile information (which is done by sending another join membership event into @@ -186,6 +196,7 @@ class StatsRoomTests(unittest.HomeserverTestCase): # Get the current room stats r1stats_ante = self._get_current_stats("room", r1) + assert r1stats_ante is not None # Send a profile update into the room new_profile = {"displayname": "bob"} @@ -195,6 +206,7 @@ class StatsRoomTests(unittest.HomeserverTestCase): # Get the new room stats r1stats_post = self._get_current_stats("room", r1) + assert r1stats_post is not None # Ensure that the user count did not changed self.assertEqual(r1stats_post["joined_members"], r1stats_ante["joined_members"]) @@ -202,7 +214,7 @@ class StatsRoomTests(unittest.HomeserverTestCase): r1stats_post["local_users_in_room"], r1stats_ante["local_users_in_room"] ) - def test_send_state_event_nonoverwriting(self): + def test_send_state_event_nonoverwriting(self) -> None: """ When we send a non-overwriting state event, it increments current_state_events """ @@ -218,19 +230,21 @@ class 
StatsRoomTests(unittest.HomeserverTestCase): ) r1stats_ante = self._get_current_stats("room", r1) + assert r1stats_ante is not None self.helper.send_state( r1, "cat.hissing", {"value": False}, tok=u1token, state_key="moggy" ) r1stats_post = self._get_current_stats("room", r1) + assert r1stats_post is not None self.assertEqual( r1stats_post["current_state_events"] - r1stats_ante["current_state_events"], 1, ) - def test_join_first_time(self): + def test_join_first_time(self) -> None: """ When a user joins a room for the first time, current_state_events and joined_members should increase by exactly 1. @@ -246,10 +260,12 @@ class StatsRoomTests(unittest.HomeserverTestCase): u2token = self.login("u2", "pass") r1stats_ante = self._get_current_stats("room", r1) + assert r1stats_ante is not None self.helper.join(r1, u2, tok=u2token) r1stats_post = self._get_current_stats("room", r1) + assert r1stats_post is not None self.assertEqual( r1stats_post["current_state_events"] - r1stats_ante["current_state_events"], @@ -259,7 +275,7 @@ class StatsRoomTests(unittest.HomeserverTestCase): r1stats_post["joined_members"] - r1stats_ante["joined_members"], 1 ) - def test_join_after_leave(self): + def test_join_after_leave(self) -> None: """ When a user joins a room after having previously left, joined_members should increase by exactly 1. @@ -280,10 +296,12 @@ class StatsRoomTests(unittest.HomeserverTestCase): self.helper.leave(r1, u2, tok=u2token) r1stats_ante = self._get_current_stats("room", r1) + assert r1stats_ante is not None self.helper.join(r1, u2, tok=u2token) r1stats_post = self._get_current_stats("room", r1) + assert r1stats_post is not None self.assertEqual( r1stats_post["current_state_events"] - r1stats_ante["current_state_events"], @@ -296,7 +314,7 @@ class StatsRoomTests(unittest.HomeserverTestCase): r1stats_post["left_members"] - r1stats_ante["left_members"], -1 ) - def test_invited(self): + def test_invited(self) -> None: """ When a user invites another user, current_state_events and invited_members should increase by exactly 1. @@ -311,10 +329,12 @@ class StatsRoomTests(unittest.HomeserverTestCase): u2 = self.register_user("u2", "pass") r1stats_ante = self._get_current_stats("room", r1) + assert r1stats_ante is not None self.helper.invite(r1, u1, u2, tok=u1token) r1stats_post = self._get_current_stats("room", r1) + assert r1stats_post is not None self.assertEqual( r1stats_post["current_state_events"] - r1stats_ante["current_state_events"], @@ -324,7 +344,7 @@ class StatsRoomTests(unittest.HomeserverTestCase): r1stats_post["invited_members"] - r1stats_ante["invited_members"], +1 ) - def test_join_after_invite(self): + def test_join_after_invite(self) -> None: """ When a user joins a room after being invited, joined_members should increase by exactly 1. @@ -344,10 +364,12 @@ class StatsRoomTests(unittest.HomeserverTestCase): self.helper.invite(r1, u1, u2, tok=u1token) r1stats_ante = self._get_current_stats("room", r1) + assert r1stats_ante is not None self.helper.join(r1, u2, tok=u2token) r1stats_post = self._get_current_stats("room", r1) + assert r1stats_post is not None self.assertEqual( r1stats_post["current_state_events"] - r1stats_ante["current_state_events"], @@ -360,7 +382,7 @@ class StatsRoomTests(unittest.HomeserverTestCase): r1stats_post["invited_members"] - r1stats_ante["invited_members"], -1 ) - def test_left(self): + def test_left(self) -> None: """ When a user leaves a room after joining, left_members should increase by exactly 1.
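# The recurring change from self.assertIsNotNone(x) to a bare assert is about
# type narrowing: mypy narrows Optional[T] to T after `assert x is not None`,
# but does not understand the unittest helper, so every later subscript of the
# stats dict would otherwise be flagged. Illustration:
from typing import Dict, Optional

def current_stats() -> Optional[Dict[str, int]]:  # illustrative stand-in
    return {"joined_members": 1}

stats = current_stats()
assert stats is not None           # mypy: stats is now Dict[str, int]
joined = stats["joined_members"]   # ok; an error if only assertIsNotNone ran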
@@ -380,10 +402,12 @@ class StatsRoomTests(unittest.HomeserverTestCase): self.helper.join(r1, u2, tok=u2token) r1stats_ante = self._get_current_stats("room", r1) + assert r1stats_ante is not None self.helper.leave(r1, u2, tok=u2token) r1stats_post = self._get_current_stats("room", r1) + assert r1stats_post is not None self.assertEqual( r1stats_post["current_state_events"] - r1stats_ante["current_state_events"], @@ -396,7 +420,7 @@ class StatsRoomTests(unittest.HomeserverTestCase): r1stats_post["joined_members"] - r1stats_ante["joined_members"], -1 ) - def test_banned(self): + def test_banned(self) -> None: """ When a user is banned from a room after joining, left_members should increase by exactly 1. @@ -416,10 +440,12 @@ class StatsRoomTests(unittest.HomeserverTestCase): self.helper.join(r1, u2, tok=u2token) r1stats_ante = self._get_current_stats("room", r1) + assert r1stats_ante is not None self.helper.change_membership(r1, u1, u2, "ban", tok=u1token) r1stats_post = self._get_current_stats("room", r1) + assert r1stats_post is not None self.assertEqual( r1stats_post["current_state_events"] - r1stats_ante["current_state_events"], @@ -432,7 +458,7 @@ class StatsRoomTests(unittest.HomeserverTestCase): r1stats_post["joined_members"] - r1stats_ante["joined_members"], -1 ) - def test_initial_background_update(self): + def test_initial_background_update(self) -> None: """ Test that statistics can be generated by the initial background update handler. @@ -462,6 +488,9 @@ class StatsRoomTests(unittest.HomeserverTestCase): r1stats = self._get_current_stats("room", r1) u1stats = self._get_current_stats("user", u1) + assert r1stats is not None + assert u1stats is not None + self.assertEqual(r1stats["joined_members"], 1) self.assertEqual( r1stats["current_state_events"], EXPT_NUM_STATE_EVTS_IN_FRESH_PUBLIC_ROOM ) self.assertEqual(u1stats["joined_rooms"], 1) - def test_incomplete_stats(self): + def test_incomplete_stats(self) -> None: """ This tests that we track incomplete statistics.
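# These membership tests all follow the same ante/post shape: snapshot the
# stats row, perform one membership change, then assert on counter deltas
# rather than absolute values. A hypothetical helper condensing that pattern
# (not part of this diff):
from typing import Dict

def assert_delta(
    ante: Dict[str, int], post: Dict[str, int], field: str, expected: int
) -> None:
    actual = post[field] - ante[field]
    assert actual == expected, f"{field}: expected delta {expected}, got {actual}"

# e.g. assert_delta(r1stats_ante, r1stats_post, "joined_members", 1)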
@@ -533,8 +562,11 @@ class StatsRoomTests(unittest.HomeserverTestCase): self.wait_for_background_updates() r1stats_complete = self._get_current_stats("room", r1) + assert r1stats_complete is not None u1stats_complete = self._get_current_stats("user", u1) + assert u1stats_complete is not None u2stats_complete = self._get_current_stats("user", u2) + assert u2stats_complete is not None # now we make our assertions diff --git a/tests/handlers/test_sync.py b/tests/handlers/test_sync.py index ab5c101eb7..0d9a3de92a 100644 --- a/tests/handlers/test_sync.py +++ b/tests/handlers/test_sync.py @@ -14,6 +14,8 @@ from typing import Optional from unittest.mock import MagicMock, Mock, patch +from twisted.test.proto_helpers import MemoryReactor + from synapse.api.constants import EventTypes, JoinRules from synapse.api.errors import Codes, ResourceLimitError from synapse.api.filtering import Filtering @@ -23,6 +25,7 @@ from synapse.rest import admin from synapse.rest.client import knock, login, room from synapse.server import HomeServer from synapse.types import UserID, create_requester +from synapse.util import Clock import tests.unittest import tests.utils @@ -39,7 +42,7 @@ class SyncTestCase(tests.unittest.HomeserverTestCase): room.register_servlets, ] - def prepare(self, reactor, clock, hs: HomeServer): + def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None: self.sync_handler = self.hs.get_sync_handler() self.store = self.hs.get_datastores().main @@ -47,7 +50,7 @@ class SyncTestCase(tests.unittest.HomeserverTestCase): # modify its config instead of the hs' self.auth_blocking = self.hs.get_auth_blocking() - def test_wait_for_sync_for_user_auth_blocking(self): + def test_wait_for_sync_for_user_auth_blocking(self) -> None: user_id1 = "@user1:test" user_id2 = "@user2:test" sync_config = generate_sync_config(user_id1) @@ -82,7 +85,7 @@ class SyncTestCase(tests.unittest.HomeserverTestCase): ) self.assertEqual(e.value.errcode, Codes.RESOURCE_LIMIT_EXCEEDED) - def test_unknown_room_version(self): + def test_unknown_room_version(self) -> None: """ A room with an unknown room version should not break sync (and should be excluded). """ @@ -186,7 +189,7 @@ class SyncTestCase(tests.unittest.HomeserverTestCase): self.assertNotIn(invite_room, [r.room_id for r in result.invited]) self.assertNotIn(knock_room, [r.room_id for r in result.knocked]) - def test_ban_wins_race_with_join(self): + def test_ban_wins_race_with_join(self) -> None: """Rooms shouldn't appear under "joined" if a join loses a race to a ban. A complicated edge case. Imagine the following scenario: |
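# Why this diff annotates even no-argument test methods with "-> None": by
# default mypy skips the body of any function that has no annotations at all,
# so the bare return annotation is what opts each test body in to checking.
# Illustration (neither function is called, so defining both is harmless):
def untyped():
    bad: int = "5"  # not reported: unannotated function bodies are skipped

def typed() -> None:
    bad: int = "5"  # mypy error: incompatible types in assignment (str -> int)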