685 files changed, 3793 insertions, 2498 deletions
diff --git a/.buildkite/scripts/create_postgres_db.py b/.buildkite/scripts/create_postgres_db.py index 956339de5c..cc829db216 100755 --- a/.buildkite/scripts/create_postgres_db.py +++ b/.buildkite/scripts/create_postgres_db.py @@ -1,5 +1,4 @@ #!/usr/bin/env python -# -*- coding: utf-8 -*- # Copyright 2019 The Matrix.org Foundation C.I.C. # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/CHANGES.md b/CHANGES.md index 532b30e232..206859cff7 100644 --- a/CHANGES.md +++ b/CHANGES.md @@ -1,3 +1,74 @@ +Synapse 1.33.0 (2021-05-05) +=========================== + +Features +-------- + +- Build Debian packages for Ubuntu 21.04 (Hirsute Hippo). ([\#9909](https://github.com/matrix-org/synapse/issues/9909)) + + +Synapse 1.33.0rc2 (2021-04-29) +============================== + +Bugfixes +-------- + +- Fix tight loop when handling presence replication when using workers. Introduced in v1.33.0rc1. ([\#9900](https://github.com/matrix-org/synapse/issues/9900)) + + +Synapse 1.33.0rc1 (2021-04-28) +============================== + +Features +-------- + +- Update experimental support for [MSC3083](https://github.com/matrix-org/matrix-doc/pull/3083): restricting room access via group membership. ([\#9800](https://github.com/matrix-org/synapse/issues/9800), [\#9814](https://github.com/matrix-org/synapse/issues/9814)) +- Add experimental support for handling presence on a worker. ([\#9819](https://github.com/matrix-org/synapse/issues/9819), [\#9820](https://github.com/matrix-org/synapse/issues/9820), [\#9828](https://github.com/matrix-org/synapse/issues/9828), [\#9850](https://github.com/matrix-org/synapse/issues/9850)) +- Return a new template when an user attempts to renew their account multiple times with the same token, stating that their account is set to expire. This replaces the invalid token template that would previously be shown in this case. This change concerns the optional account validity feature. ([\#9832](https://github.com/matrix-org/synapse/issues/9832)) + + +Bugfixes +-------- + +- Fixes the OIDC SSO flow when using a `public_baseurl` value including a non-root URL path. ([\#9726](https://github.com/matrix-org/synapse/issues/9726)) +- Fix thumbnail generation for some sites with non-standard content types. Contributed by @rkfg. ([\#9788](https://github.com/matrix-org/synapse/issues/9788)) +- Add some sanity checks to identity server passed to 3PID bind/unbind endpoints. ([\#9802](https://github.com/matrix-org/synapse/issues/9802)) +- Limit the size of HTTP responses read over federation. ([\#9833](https://github.com/matrix-org/synapse/issues/9833)) +- Fix a bug which could cause Synapse to get stuck in a loop of resyncing device lists. ([\#9867](https://github.com/matrix-org/synapse/issues/9867)) +- Fix a long-standing bug where errors from federation did not propagate to the client. ([\#9868](https://github.com/matrix-org/synapse/issues/9868)) + + +Improved Documentation +---------------------- + +- Add a note to the docker docs mentioning that we mirror upstream's supported Docker platforms. ([\#9801](https://github.com/matrix-org/synapse/issues/9801)) + + +Internal Changes +---------------- + +- Add a dockerfile for running Synapse in worker-mode under Complement. ([\#9162](https://github.com/matrix-org/synapse/issues/9162)) +- Apply `pyupgrade` across the codebase. ([\#9786](https://github.com/matrix-org/synapse/issues/9786)) +- Move some replication processing out of `generic_worker`. 
([\#9796](https://github.com/matrix-org/synapse/issues/9796)) +- Replace `HomeServer.get_config()` with inline references. ([\#9815](https://github.com/matrix-org/synapse/issues/9815)) +- Rename some handlers and config modules to not duplicate the top-level module. ([\#9816](https://github.com/matrix-org/synapse/issues/9816)) +- Fix a long-standing bug which caused `max_upload_size` to not be correctly enforced. ([\#9817](https://github.com/matrix-org/synapse/issues/9817)) +- Reduce CPU usage of the user directory by reusing existing calculated room membership. ([\#9821](https://github.com/matrix-org/synapse/issues/9821)) +- Small speed up for joining large remote rooms. ([\#9825](https://github.com/matrix-org/synapse/issues/9825)) +- Introduce flake8-bugbear to the test suite and fix some of its lint violations. ([\#9838](https://github.com/matrix-org/synapse/issues/9838)) +- Only store the raw data in the in-memory caches, rather than objects that include references to e.g. the data stores. ([\#9845](https://github.com/matrix-org/synapse/issues/9845)) +- Limit length of accepted email addresses. ([\#9855](https://github.com/matrix-org/synapse/issues/9855)) +- Remove redundant `synapse.types.Collection` type definition. ([\#9856](https://github.com/matrix-org/synapse/issues/9856)) +- Handle recently added rate limits correctly when using `--no-rate-limit` with the demo scripts. ([\#9858](https://github.com/matrix-org/synapse/issues/9858)) +- Disable invite rate-limiting by default when running the unit tests. ([\#9871](https://github.com/matrix-org/synapse/issues/9871)) +- Pass a reactor into `SynapseSite` to make testing easier. ([\#9874](https://github.com/matrix-org/synapse/issues/9874)) +- Make `DomainSpecificString` an `attrs` class. ([\#9875](https://github.com/matrix-org/synapse/issues/9875)) +- Add type hints to `synapse.api.auth` and `synapse.api.auth_blocking` modules. ([\#9876](https://github.com/matrix-org/synapse/issues/9876)) +- Remove redundant `_PushHTTPChannel` test class. ([\#9878](https://github.com/matrix-org/synapse/issues/9878)) +- Remove backwards-compatibility code for Python versions < 3.6. ([\#9879](https://github.com/matrix-org/synapse/issues/9879)) +- Small performance improvement around handling new local presence updates. ([\#9887](https://github.com/matrix-org/synapse/issues/9887)) + + Synapse 1.32.2 (2021-04-22) =========================== diff --git a/UPGRADE.rst b/UPGRADE.rst index 6af35bc38f..e921e0c08a 100644 --- a/UPGRADE.rst +++ b/UPGRADE.rst @@ -85,6 +85,29 @@ for example: wget https://packages.matrix.org/debian/pool/main/m/matrix-synapse-py3/matrix-synapse-py3_1.3.0+stretch1_amd64.deb dpkg -i matrix-synapse-py3_1.3.0+stretch1_amd64.deb +Upgrading to v1.33.0 +==================== + +Account Validity HTML templates can now display a user's expiration date +------------------------------------------------------------------------ + +This may affect you if you have enabled the account validity feature, and have made use of a +custom HTML template specified by the ``account_validity.template_dir`` or ``account_validity.account_renewed_html_path`` +Synapse config options. + +The template can now accept an ``expiration_ts`` variable, which is a unix timestamp in milliseconds representing the +future date until which the account has been renewed. See the +`default template <https://github.com/matrix-org/synapse/blob/release-v1.33.0/synapse/res/templates/account_renewed.html>`_ +for an example of usage.
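For illustration only, a minimal custom ``account_renewed.html`` making use of ``expiration_ts`` might look like the sketch below. This is not the upstream default template, and the client-side ``new Date(...)`` call is just one hypothetical way of turning the millisecond timestamp into a readable date:

```html
<html>
<body>
  <p>Your account has been successfully renewed.</p>
  <p>
    It is now valid until
    <!-- expiration_ts is filled in by Synapse as a unix timestamp in milliseconds -->
    <script>document.write(new Date({{ expiration_ts }}).toLocaleDateString());</script>
  </p>
</body>
</html>
```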
+ +Also note that a new HTML template, ``account_previously_renewed.html``, has been added. This is shown to users +when they attempt to renew their account with a valid renewal token that has already been used before. The default +template contents can be found +`here <https://github.com/matrix-org/synapse/blob/release-v1.33.0/synapse/res/templates/account_previously_renewed.html>`_, +and can also accept an ``expiration_ts`` variable. This template replaces the error message users would previously see +upon attempting to use a valid renewal token more than once. + + Upgrading to v1.32.0 ==================== diff --git a/contrib/cmdclient/http.py b/contrib/cmdclient/http.py index 1cf913756e..1310f078e3 100644 --- a/contrib/cmdclient/http.py +++ b/contrib/cmdclient/http.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2014-2016 OpenMarket Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/contrib/experiments/test_messaging.py b/contrib/experiments/test_messaging.py index 7fbc7d8fc6..31b8a68225 100644 --- a/contrib/experiments/test_messaging.py +++ b/contrib/experiments/test_messaging.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2014-2016 OpenMarket Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/debian/changelog b/debian/changelog index fd33bfda5c..b54d3f2bc6 100644 --- a/debian/changelog +++ b/debian/changelog @@ -1,3 +1,9 @@ +matrix-synapse-py3 (1.33.0) stable; urgency=medium + + * New synapse release 1.33.0. + + -- Synapse Packaging team <packages@matrix.org> Wed, 05 May 2021 14:15:27 +0100 + matrix-synapse-py3 (1.32.2) stable; urgency=medium * New synapse release 1.32.2. diff --git a/demo/start.sh b/demo/start.sh index 621a5698b8..bc4854091b 100755 --- a/demo/start.sh +++ b/demo/start.sh @@ -96,18 +96,48 @@ for port in 8080 8081 8082; do # Check script parameters if [ $# -eq 1 ]; then if [ $1 = "--no-rate-limit" ]; then - # messages rate limit - echo 'rc_messages_per_second: 1000' >> $DIR/etc/$port.config - echo 'rc_message_burst_count: 1000' >> $DIR/etc/$port.config - - # registration rate limit - printf 'rc_registration:\n per_second: 1000\n burst_count: 1000\n' >> $DIR/etc/$port.config - - # login rate limit - echo 'rc_login:' >> $DIR/etc/$port.config - printf ' address:\n per_second: 1000\n burst_count: 1000\n' >> $DIR/etc/$port.config - printf ' account:\n per_second: 1000\n burst_count: 1000\n' >> $DIR/etc/$port.config - printf ' failed_attempts:\n per_second: 1000\n burst_count: 1000\n' >> $DIR/etc/$port.config + + # Disable any rate limiting + ratelimiting=$(cat <<-RC + rc_message: + per_second: 1000 + burst_count: 1000 + rc_registration: + per_second: 1000 + burst_count: 1000 + rc_login: + address: + per_second: 1000 + burst_count: 1000 + account: + per_second: 1000 + burst_count: 1000 + failed_attempts: + per_second: 1000 + burst_count: 1000 + rc_admin_redaction: + per_second: 1000 + burst_count: 1000 + rc_joins: + local: + per_second: 1000 + burst_count: 1000 + remote: + per_second: 1000 + burst_count: 1000 + rc_3pid_validation: + per_second: 1000 + burst_count: 1000 + rc_invites: + per_room: + per_second: 1000 + burst_count: 1000 + per_user: + per_second: 1000 + burst_count: 1000 + RC + ) + echo "${ratelimiting}" >> $DIR/etc/$port.config fi fi diff --git a/docker/Dockerfile-workers b/docker/Dockerfile-workers new file mode 100644 index 0000000000..969cf97286 --- /dev/null +++ b/docker/Dockerfile-workers @@ -0,0 +1,23 @@ +# Inherit from the official Synapse docker image +FROM
matrixdotorg/synapse + +# Install deps +RUN apt-get update +RUN apt-get install -y supervisor redis nginx + +# Remove the default nginx sites +RUN rm /etc/nginx/sites-enabled/default + +# Copy Synapse worker, nginx and supervisord configuration template files +COPY ./docker/conf-workers/* /conf/ + +# Expose nginx listener port +EXPOSE 8080/tcp + +# Volume for user-editable config files, logs etc. +VOLUME ["/data"] + +# A script to read environment variables and create the necessary +# files to run the desired worker configuration. Will start supervisord. +COPY ./docker/configure_workers_and_start.py /configure_workers_and_start.py +ENTRYPOINT ["/configure_workers_and_start.py"] diff --git a/docker/README-testing.md b/docker/README-testing.md new file mode 100644 index 0000000000..6a5baf9e28 --- /dev/null +++ b/docker/README-testing.md @@ -0,0 +1,140 @@ +# Running tests against a dockerised Synapse + +It's possible to run integration tests against Synapse +using [Complement](https://github.com/matrix-org/complement). Complement is a Matrix Spec +compliance test suite for homeservers, and supports any homeserver docker image configured +to listen on ports 8008/8448. This document contains instructions for building Synapse +docker images that can be run inside Complement for testing purposes. + +Note that running Synapse's unit tests from within the docker image is not supported. + +## Testing with SQLite and single-process Synapse + +> Note that `scripts-dev/complement.sh` is a script that will automatically build +> and run an SQLite-based, single-process of Synapse against Complement. + +The instructions below will set up Complement testing for a single-process, +SQLite-based Synapse deployment. + +Start by building the base Synapse docker image. If you wish to run tests with the latest +release of Synapse, instead of your current checkout, you can skip this step. From the +root of the repository: + +```sh +docker build -t matrixdotorg/synapse -f docker/Dockerfile . +``` + +This will build an image with the tag `matrixdotorg/synapse`. + +Next, build the Synapse image for Complement. You will need a local checkout +of Complement. Change to the root of your Complement checkout and run: + +```sh +docker build -t complement-synapse -f "dockerfiles/Synapse.Dockerfile" dockerfiles +``` + +This will build an image with the tag `complement-synapse`, which can be handed to +Complement for testing via the `COMPLEMENT_BASE_IMAGE` environment variable. Refer to +[Complement's documentation](https://github.com/matrix-org/complement/#running) for +how to run the tests, as well as the various available command line flags. + +## Testing with PostgreSQL and single or multi-process Synapse + +The above docker image only supports running Synapse with SQLite and in a +single-process topology. The following instructions are used to build a Synapse image for +Complement that supports either single or multi-process topology with a PostgreSQL +database backend. + +As with the single-process image, build the base Synapse docker image. If you wish to run +tests with the latest release of Synapse, instead of your current checkout, you can skip +this step. From the root of the repository: + +```sh +docker build -t matrixdotorg/synapse -f docker/Dockerfile . +``` + +This will build an image with the tag `matrixdotorg/synapse`. + +Next, we build a new image with worker support based on `matrixdotorg/synapse:latest`. 
+Again, from the root of the repository: + +```sh +docker build -t matrixdotorg/synapse-workers -f docker/Dockerfile-workers . +``` + +This will build an image with the tag `matrixdotorg/synapse-workers`. + +It's worth noting at this point that this image is fully functional, and +can be used for local testing. See instructions for using the container +under +[Running the Dockerfile-worker image standalone](#running-the-dockerfile-worker-image-standalone) +below. + +Finally, build the Synapse image for Complement, which is based on +`matrixdotorg/synapse-workers`. You will need a local checkout of Complement. Change to +the root of your Complement checkout and run: + +```sh +docker build -t matrixdotorg/complement-synapse-workers -f dockerfiles/SynapseWorkers.Dockerfile dockerfiles +``` + +This will build an image with the tag `matrixdotorg/complement-synapse-workers`, which can be handed to +Complement for testing via the `COMPLEMENT_BASE_IMAGE` environment variable. Refer to +[Complement's documentation](https://github.com/matrix-org/complement/#running) for +how to run the tests, as well as the various available command line flags. + +## Running the Dockerfile-worker image standalone + +For manual testing of a multi-process Synapse instance in Docker, +[Dockerfile-workers](Dockerfile-workers) is a Dockerfile that will produce an image +bundling all necessary components together for a workerised homeserver instance. + +This includes any desired Synapse worker processes, an nginx to route traffic accordingly, +a redis for worker communication and a supervisord instance to start up and monitor all +processes. You will need to provide your own postgres container to connect to, and TLS +is not handled by the container. + +Once you've built the image using the above instructions, you can run it. Be sure +you've set up a volume according to the [usual Synapse docker instructions](README.md). +Then run something along the lines of: + +``` +docker run -d --name synapse \ + --mount type=volume,src=synapse-data,dst=/data \ + -p 8008:8008 \ + -e SYNAPSE_SERVER_NAME=my.matrix.host \ + -e SYNAPSE_REPORT_STATS=no \ + -e POSTGRES_HOST=postgres \ + -e POSTGRES_USER=postgres \ + -e POSTGRES_PASSWORD=somesecret \ + -e SYNAPSE_WORKER_TYPES=synchrotron,media_repository,user_dir \ + -e SYNAPSE_WORKERS_WRITE_LOGS_TO_DISK=1 \ + matrixdotorg/synapse-workers +``` + +...substituting `POSTGRES*` variables for those that match a postgres host you have +available (usually a running postgres docker container). + +The `SYNAPSE_WORKER_TYPES` environment variable is a comma-separated list of workers to +use when running the container. All possible worker names are defined by the keys of the +`WORKERS_CONFIG` variable in [this script](configure_workers_and_start.py), which the +Dockerfile makes use of to generate appropriate worker, nginx and supervisord config +files. + +Sharding is supported for a subset of workers, in line with the +[worker documentation](../docs/workers.md). To run multiple instances of a given worker +type, simply specify the type multiple times in `SYNAPSE_WORKER_TYPES` +(e.g. `SYNAPSE_WORKER_TYPES=event_creator,event_creator...`). + +Otherwise, `SYNAPSE_WORKER_TYPES` can either be left empty or unset to spawn no workers +(leaving only the main process). The container is configured to use redis-based worker +mode. + +Logs for workers and the main process are written to stdout and can be viewed with +standard `docker logs` tooling. Worker logs contain their worker name +after the timestamp.
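For example (illustrative only — the container name comes from the `docker run` example above, and `synchrotron1` is simply the name the configuration script gives the first synchrotron instance), individual worker output can be picked out of the combined stream with ordinary tooling:

```sh
# All processes (main Synapse, workers, nginx, redis) write to stdout,
# so the combined stream is available through standard docker tooling.
docker logs -f synapse

# Each worker log line carries "worker:<worker_name>" after the timestamp,
# so a single worker can be filtered out with grep.
docker logs synapse 2>&1 | grep "worker:synchrotron1"
```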
+ +Setting `SYNAPSE_WORKERS_WRITE_LOGS_TO_DISK=1` will cause worker logs to be written to +`<data_dir>/logs/<worker_name>.log`. Logs are kept for 1 week and rotate every day at 00:00, according to the container's clock. Logging for the main process must still be +configured by modifying the homeserver's log config in your Synapse data volume. diff --git a/docker/README.md b/docker/README.md index 3a7dc585e7..a7d1e670fe 100644 --- a/docker/README.md +++ b/docker/README.md @@ -2,13 +2,16 @@ This Docker image will run Synapse as a single process. By default it uses a sqlite database; for production use you should connect it to a separate -postgres database. +postgres database. The image also does *not* provide a TURN server. -The image also does *not* provide a TURN server. +This image should work on all platforms that are supported by Docker upstream. +Note that Docker's WSL2-backend Linux Containers on Windows +platform is [experimental](https://github.com/docker/for-win/issues/6470) and +is not supported by this image. ## Volumes -By default, the image expects a single volume, located at ``/data``, that will hold: +By default, the image expects a single volume, located at `/data`, that will hold: * configuration files; * uploaded media and thumbnails; @@ -16,11 +19,11 @@ By default, the image expects a single volume, located at ``/data``, that will h * the appservices configuration. You are free to use separate volumes depending on storage endpoints at your -disposal. For instance, ``/data/media`` could be stored on a large but low +disposal. For instance, `/data/media` could be stored on a large but low performance hdd storage while other files could be stored on high performance endpoints. -In order to setup an application service, simply create an ``appservices`` +In order to setup an application service, simply create an `appservices` directory in the data volume and write the application service Yaml configuration file there. Multiple application services are supported. @@ -53,6 +56,8 @@ The following environment variables are supported in `generate` mode: * `SYNAPSE_SERVER_NAME` (mandatory): the server public hostname. * `SYNAPSE_REPORT_STATS` (mandatory, `yes` or `no`): whether to enable anonymous statistics reporting. +* `SYNAPSE_HTTP_PORT`: the port Synapse should listen on for http traffic. + Defaults to `8008`. * `SYNAPSE_CONFIG_DIR`: where additional config files (such as the log config and event signing key) will be stored. Defaults to `/data`. * `SYNAPSE_CONFIG_PATH`: path to the file to be generated. Defaults to @@ -73,6 +78,8 @@ docker run -d --name synapse \ matrixdotorg/synapse:latest ``` +(assuming 8008 is the port Synapse is configured to listen on for http traffic.) + You can then check that it has started correctly with: ``` @@ -208,4 +215,4 @@ healthcheck: ## Using jemalloc Jemalloc is embedded in the image and will be used instead of the default allocator. -You can read about jemalloc by reading the Synapse [README](../README.md) \ No newline at end of file +You can read about jemalloc by reading the Synapse [README](../README.md). diff --git a/docker/conf-workers/nginx.conf.j2 b/docker/conf-workers/nginx.conf.j2 new file mode 100644 index 0000000000..1081979e06 --- /dev/null +++ b/docker/conf-workers/nginx.conf.j2 @@ -0,0 +1,27 @@ +# This file contains the base config for the reverse proxy, as part of ../Dockerfile-workers. +# configure_workers_and_start.py uses and amends to this file depending on the workers +# that have been selected.
+ +{{ upstream_directives }} + +server { + # Listen on an unoccupied port number + listen 8008; + listen [::]:8008; + + server_name localhost; + + # Nginx by default only allows file uploads up to 1M in size + # Increase client_max_body_size to match max_upload_size defined in homeserver.yaml + client_max_body_size 100M; + +{{ worker_locations }} + + # Send all other traffic to the main process + location ~* ^(\\/_matrix|\\/_synapse) { + proxy_pass http://localhost:8080; + proxy_set_header X-Forwarded-For $remote_addr; + proxy_set_header X-Forwarded-Proto $scheme; + proxy_set_header Host $host; + } +} diff --git a/docker/conf-workers/shared.yaml.j2 b/docker/conf-workers/shared.yaml.j2 new file mode 100644 index 0000000000..f94b8c6aca --- /dev/null +++ b/docker/conf-workers/shared.yaml.j2 @@ -0,0 +1,9 @@ +# This file contains the base for the shared homeserver config file between Synapse workers, +# as part of ./Dockerfile-workers. +# configure_workers_and_start.py uses and amends to this file depending on the workers +# that have been selected. + +redis: + enabled: true + +{{ shared_worker_config }} \ No newline at end of file diff --git a/docker/conf-workers/supervisord.conf.j2 b/docker/conf-workers/supervisord.conf.j2 new file mode 100644 index 0000000000..0de2c6143b --- /dev/null +++ b/docker/conf-workers/supervisord.conf.j2 @@ -0,0 +1,41 @@ +# This file contains the base config for supervisord, as part of ../Dockerfile-workers. +# configure_workers_and_start.py uses and amends to this file depending on the workers +# that have been selected. +[supervisord] +nodaemon=true +user=root + +[program:nginx] +command=/usr/sbin/nginx -g "daemon off;" +priority=500 +stdout_logfile=/dev/stdout +stdout_logfile_maxbytes=0 +stderr_logfile=/dev/stderr +stderr_logfile_maxbytes=0 +username=www-data +autorestart=true + +[program:redis] +command=/usr/bin/redis-server /etc/redis/redis.conf --daemonize no +priority=1 +stdout_logfile=/dev/stdout +stdout_logfile_maxbytes=0 +stderr_logfile=/dev/stderr +stderr_logfile_maxbytes=0 +username=redis +autorestart=true + +[program:synapse_main] +command=/usr/local/bin/python -m synapse.app.homeserver --config-path="{{ main_config_path }}" --config-path=/conf/workers/shared.yaml +priority=10 +# Log startup failures to supervisord's stdout/err +# Regular synapse logs will still go in the configured data directory +stdout_logfile=/dev/stdout +stdout_logfile_maxbytes=0 +stderr_logfile=/dev/stderr +stderr_logfile_maxbytes=0 +autorestart=unexpected +exitcodes=0 + +# Additional process blocks +{{ worker_config }} \ No newline at end of file diff --git a/docker/conf-workers/worker.yaml.j2 b/docker/conf-workers/worker.yaml.j2 new file mode 100644 index 0000000000..42131afc95 --- /dev/null +++ b/docker/conf-workers/worker.yaml.j2 @@ -0,0 +1,26 @@ +# This is a configuration template for a single worker instance, and is +# used by Dockerfile-workers. +# Values will be change depending on whichever workers are selected when +# running that image. + +worker_app: "{{ app }}" +worker_name: "{{ name }}" + +# The replication listener on the main synapse process. 
+worker_replication_host: 127.0.0.1 +worker_replication_http_port: 9093 + +worker_listeners: + - type: http + port: {{ port }} +{% if listener_resources %} + resources: + - names: +{%- for resource in listener_resources %} + - {{ resource }} +{%- endfor %} +{% endif %} + +worker_log_config: {{ worker_log_config_filepath }} + +{{ worker_extra_conf }} diff --git a/docker/conf/homeserver.yaml b/docker/conf/homeserver.yaml index a792899540..2b23d7f428 100644 --- a/docker/conf/homeserver.yaml +++ b/docker/conf/homeserver.yaml @@ -40,7 +40,9 @@ listeners: compress: false {% endif %} - - port: 8008 + # Allow configuring in case we want to reverse proxy 8008 + # using another process in the same container + - port: {{ SYNAPSE_HTTP_PORT or 8008 }} tls: false bind_addresses: ['::'] type: http diff --git a/docker/conf/log.config b/docker/conf/log.config index 491bbcc87a..34572bc0f3 100644 --- a/docker/conf/log.config +++ b/docker/conf/log.config @@ -2,9 +2,34 @@ version: 1 formatters: precise: - format: '%(asctime)s - %(name)s - %(lineno)d - %(levelname)s - %(request)s - %(message)s' +{% if worker_name %} + format: '%(asctime)s - worker:{{ worker_name }} - %(name)s - %(lineno)d - %(levelname)s - %(request)s - %(message)s' +{% else %} + format: '%(asctime)s - %(name)s - %(lineno)d - %(levelname)s - %(request)s - %(message)s' +{% endif %} handlers: + file: + class: logging.handlers.TimedRotatingFileHandler + formatter: precise + filename: {{ LOG_FILE_PATH or "homeserver.log" }} + when: "midnight" + backupCount: 6 # Does not include the current log file. + encoding: utf8 + + # Default to buffering writes to log file for efficiency. This means that + # there will be a delay for INFO/DEBUG logs to get written, but WARNING/ERROR + # logs will still be flushed immediately. + buffer: + class: logging.handlers.MemoryHandler + target: file + # The capacity is the number of log lines that are buffered before + # being written to disk. Increasing this will lead to better + # performance, at the expensive of it taking longer for log lines to + # be written to disk. + capacity: 10 + flushLevel: 30 # Flush for WARNING logs as well + console: class: logging.StreamHandler formatter: precise @@ -17,6 +42,11 @@ loggers: root: level: {{ SYNAPSE_LOG_LEVEL or "INFO" }} + +{% if LOG_FILE_PATH %} + handlers: [console, buffer] +{% else %} handlers: [console] +{% endif %} disable_existing_loggers: false diff --git a/docker/configure_workers_and_start.py b/docker/configure_workers_and_start.py new file mode 100755 index 0000000000..4be6afc65d --- /dev/null +++ b/docker/configure_workers_and_start.py @@ -0,0 +1,558 @@ +#!/usr/bin/env python +# Copyright 2021 The Matrix.org Foundation C.I.C. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# This script reads environment variables and generates a shared Synapse worker, +# nginx and supervisord configs depending on the workers requested. +# +# The environment variables it reads are: +# * SYNAPSE_SERVER_NAME: The desired server_name of the homeserver. 
+# * SYNAPSE_REPORT_STATS: Whether to report stats. +# * SYNAPSE_WORKER_TYPES: A comma separated list of worker names as specified in WORKER_CONFIG +# below. Leave empty for no workers, or set to '*' for all possible workers. +# +# NOTE: According to Complement's ENTRYPOINT expectations for a homeserver image (as defined +# in the project's README), this script may be run multiple times, and functionality should +# continue to work if so. + +import os +import subprocess +import sys + +import jinja2 +import yaml + +MAIN_PROCESS_HTTP_LISTENER_PORT = 8080 + + +WORKERS_CONFIG = { + "pusher": { + "app": "synapse.app.pusher", + "listener_resources": [], + "endpoint_patterns": [], + "shared_extra_conf": {"start_pushers": False}, + "worker_extra_conf": "", + }, + "user_dir": { + "app": "synapse.app.user_dir", + "listener_resources": ["client"], + "endpoint_patterns": [ + "^/_matrix/client/(api/v1|r0|unstable)/user_directory/search$" + ], + "shared_extra_conf": {"update_user_directory": False}, + "worker_extra_conf": "", + }, + "media_repository": { + "app": "synapse.app.media_repository", + "listener_resources": ["media"], + "endpoint_patterns": [ + "^/_matrix/media/", + "^/_synapse/admin/v1/purge_media_cache$", + "^/_synapse/admin/v1/room/.*/media.*$", + "^/_synapse/admin/v1/user/.*/media.*$", + "^/_synapse/admin/v1/media/.*$", + "^/_synapse/admin/v1/quarantine_media/.*$", + ], + "shared_extra_conf": {"enable_media_repo": False}, + "worker_extra_conf": "enable_media_repo: true", + }, + "appservice": { + "app": "synapse.app.appservice", + "listener_resources": [], + "endpoint_patterns": [], + "shared_extra_conf": {"notify_appservices": False}, + "worker_extra_conf": "", + }, + "federation_sender": { + "app": "synapse.app.federation_sender", + "listener_resources": [], + "endpoint_patterns": [], + "shared_extra_conf": {"send_federation": False}, + "worker_extra_conf": "", + }, + "synchrotron": { + "app": "synapse.app.generic_worker", + "listener_resources": ["client"], + "endpoint_patterns": [ + "^/_matrix/client/(v2_alpha|r0)/sync$", + "^/_matrix/client/(api/v1|v2_alpha|r0)/events$", + "^/_matrix/client/(api/v1|r0)/initialSync$", + "^/_matrix/client/(api/v1|r0)/rooms/[^/]+/initialSync$", + ], + "shared_extra_conf": {}, + "worker_extra_conf": "", + }, + "federation_reader": { + "app": "synapse.app.generic_worker", + "listener_resources": ["federation"], + "endpoint_patterns": [ + "^/_matrix/federation/(v1|v2)/event/", + "^/_matrix/federation/(v1|v2)/state/", + "^/_matrix/federation/(v1|v2)/state_ids/", + "^/_matrix/federation/(v1|v2)/backfill/", + "^/_matrix/federation/(v1|v2)/get_missing_events/", + "^/_matrix/federation/(v1|v2)/publicRooms", + "^/_matrix/federation/(v1|v2)/query/", + "^/_matrix/federation/(v1|v2)/make_join/", + "^/_matrix/federation/(v1|v2)/make_leave/", + "^/_matrix/federation/(v1|v2)/send_join/", + "^/_matrix/federation/(v1|v2)/send_leave/", + "^/_matrix/federation/(v1|v2)/invite/", + "^/_matrix/federation/(v1|v2)/query_auth/", + "^/_matrix/federation/(v1|v2)/event_auth/", + "^/_matrix/federation/(v1|v2)/exchange_third_party_invite/", + "^/_matrix/federation/(v1|v2)/user/devices/", + "^/_matrix/federation/(v1|v2)/get_groups_publicised$", + "^/_matrix/key/v2/query", + ], + "shared_extra_conf": {}, + "worker_extra_conf": "", + }, + "federation_inbound": { + "app": "synapse.app.generic_worker", + "listener_resources": ["federation"], + "endpoint_patterns": ["/_matrix/federation/(v1|v2)/send/"], + "shared_extra_conf": {}, + "worker_extra_conf": "", + }, + "event_persister": { + 
"app": "synapse.app.generic_worker", + "listener_resources": ["replication"], + "endpoint_patterns": [], + "shared_extra_conf": {}, + "worker_extra_conf": "", + }, + "background_worker": { + "app": "synapse.app.generic_worker", + "listener_resources": [], + "endpoint_patterns": [], + # This worker cannot be sharded. Therefore there should only ever be one background + # worker, and it should be named background_worker1 + "shared_extra_conf": {"run_background_tasks_on": "background_worker1"}, + "worker_extra_conf": "", + }, + "event_creator": { + "app": "synapse.app.generic_worker", + "listener_resources": ["client"], + "endpoint_patterns": [ + "^/_matrix/client/(api/v1|r0|unstable)/rooms/.*/redact", + "^/_matrix/client/(api/v1|r0|unstable)/rooms/.*/send", + "^/_matrix/client/(api/v1|r0|unstable)/rooms/.*/(join|invite|leave|ban|unban|kick)$", + "^/_matrix/client/(api/v1|r0|unstable)/join/", + "^/_matrix/client/(api/v1|r0|unstable)/profile/", + ], + "shared_extra_conf": {}, + "worker_extra_conf": "", + }, + "frontend_proxy": { + "app": "synapse.app.frontend_proxy", + "listener_resources": ["client", "replication"], + "endpoint_patterns": ["^/_matrix/client/(api/v1|r0|unstable)/keys/upload"], + "shared_extra_conf": {}, + "worker_extra_conf": ( + "worker_main_http_uri: http://127.0.0.1:%d" + % (MAIN_PROCESS_HTTP_LISTENER_PORT,), + ), + }, +} + +# Templates for sections that may be inserted multiple times in config files +SUPERVISORD_PROCESS_CONFIG_BLOCK = """ +[program:synapse_{name}] +command=/usr/local/bin/python -m {app} \ + --config-path="{config_path}" \ + --config-path=/conf/workers/shared.yaml \ + --config-path=/conf/workers/{name}.yaml +autorestart=unexpected +priority=500 +exitcodes=0 +stdout_logfile=/dev/stdout +stdout_logfile_maxbytes=0 +stderr_logfile=/dev/stderr +stderr_logfile_maxbytes=0 +""" + +NGINX_LOCATION_CONFIG_BLOCK = """ + location ~* {endpoint} { + proxy_pass {upstream}; + proxy_set_header X-Forwarded-For $remote_addr; + proxy_set_header X-Forwarded-Proto $scheme; + proxy_set_header Host $host; + } +""" + +NGINX_UPSTREAM_CONFIG_BLOCK = """ +upstream {upstream_worker_type} { +{body} +} +""" + + +# Utility functions +def log(txt: str): + """Log something to the stdout. + + Args: + txt: The text to log. + """ + print(txt) + + +def error(txt: str): + """Log something and exit with an error code. + + Args: + txt: The text to log in error. + """ + log(txt) + sys.exit(2) + + +def convert(src: str, dst: str, **template_vars): + """Generate a file from a template + + Args: + src: Path to the input file. + dst: Path to write to. + template_vars: The arguments to replace placeholder variables in the template with. + """ + # Read the template file + with open(src) as infile: + template = infile.read() + + # Generate a string from the template. We disable autoescape to prevent template + # variables from being escaped. + rendered = jinja2.Template(template, autoescape=False).render(**template_vars) + + # Write the generated contents to a file + # + # We use append mode in case the files have already been written to by something else + # (for instance, as part of the instructions in a dockerfile). 
+ with open(dst, "a") as outfile: + # In case the existing file doesn't end with a newline + outfile.write("\n") + + outfile.write(rendered) + + +def add_sharding_to_shared_config( + shared_config: dict, + worker_type: str, + worker_name: str, + worker_port: int, +) -> None: + """Given a dictionary representing a config file shared across all workers, + append sharded worker information to it for the current worker_type instance. + + Args: + shared_config: The config dict that all worker instances share (after being converted to YAML) + worker_type: The type of worker (one of those defined in WORKERS_CONFIG). + worker_name: The name of the worker instance. + worker_port: The HTTP replication port that the worker instance is listening on. + """ + # The instance_map config field marks the workers that write to various replication streams + instance_map = shared_config.setdefault("instance_map", {}) + + # Worker-type specific sharding config + if worker_type == "pusher": + shared_config.setdefault("pusher_instances", []).append(worker_name) + + elif worker_type == "federation_sender": + shared_config.setdefault("federation_sender_instances", []).append(worker_name) + + elif worker_type == "event_persister": + # Event persisters write to the events stream, so we need to update + # the list of event stream writers + shared_config.setdefault("stream_writers", {}).setdefault("events", []).append( + worker_name + ) + + # Map of stream writer instance names to host/ports combos + instance_map[worker_name] = { + "host": "localhost", + "port": worker_port, + } + + elif worker_type == "media_repository": + # The first configured media worker will run the media background jobs + shared_config.setdefault("media_instance_running_background_jobs", worker_name) + + +def generate_base_homeserver_config(): + """Starts Synapse and generates a basic homeserver config, which will later be + modified for worker support. + + Raises: CalledProcessError if calling start.py returned a non-zero exit code. + """ + # start.py already does this for us, so just call that. + # note that this script is copied in in the official, monolith dockerfile + os.environ["SYNAPSE_HTTP_PORT"] = str(MAIN_PROCESS_HTTP_LISTENER_PORT) + subprocess.check_output(["/usr/local/bin/python", "/start.py", "migrate_config"]) + + +def generate_worker_files(environ, config_path: str, data_dir: str): + """Read the desired list of workers from environment variables and generate + shared homeserver, nginx and supervisord configs. + + Args: + environ: _Environ[str] + config_path: Where to output the generated Synapse main worker config file. + data_dir: The location of the synapse data directory. Where log and + user-facing config files live. + """ + # Note that yaml cares about indentation, so care should be taken to insert lines + # into files at the correct indentation below. + + # shared_config is the contents of a Synapse config file that will be shared amongst + # the main Synapse process as well as all workers. + # It is intended mainly for disabling functionality when certain workers are spun up, + # and adding a replication listener. + + # First read the original config file and extract the listeners block. Then we'll add + # another listener for replication. Later we'll write out the result. 
+ listeners = [ + { + "port": 9093, + "bind_address": "127.0.0.1", + "type": "http", + "resources": [{"names": ["replication"]}], + } + ] + with open(config_path) as file_stream: + original_config = yaml.safe_load(file_stream) + original_listeners = original_config.get("listeners") + if original_listeners: + listeners += original_listeners + + # The shared homeserver config. The contents of which will be inserted into the + # base shared worker jinja2 template. + # + # This config file will be passed to all workers, included Synapse's main process. + shared_config = {"listeners": listeners} + + # The supervisord config. The contents of which will be inserted into the + # base supervisord jinja2 template. + # + # Supervisord will be in charge of running everything, from redis to nginx to Synapse + # and all of its worker processes. Load the config template, which defines a few + # services that are necessary to run. + supervisord_config = "" + + # Upstreams for load-balancing purposes. This dict takes the form of a worker type to the + # ports of each worker. For example: + # { + # worker_type: {1234, 1235, ...}} + # } + # and will be used to construct 'upstream' nginx directives. + nginx_upstreams = {} + + # A map of: {"endpoint": "upstream"}, where "upstream" is a str representing what will be + # placed after the proxy_pass directive. The main benefit to representing this data as a + # dict over a str is that we can easily deduplicate endpoints across multiple instances + # of the same worker. + # + # An nginx site config that will be amended to depending on the workers that are + # spun up. To be placed in /etc/nginx/conf.d. + nginx_locations = {} + + # Read the desired worker configuration from the environment + worker_types = environ.get("SYNAPSE_WORKER_TYPES") + if worker_types is None: + # No workers, just the main process + worker_types = [] + else: + # Split type names by comma + worker_types = worker_types.split(",") + + # Create the worker configuration directory if it doesn't already exist + os.makedirs("/conf/workers", exist_ok=True) + + # Start worker ports from this arbitrary port + worker_port = 18009 + + # A counter of worker_type -> int. Used for determining the name for a given + # worker type when generating its config file, as each worker's name is just + # worker_type + instance # + worker_type_counter = {} + + # For each worker type specified by the user, create config values + for worker_type in worker_types: + worker_type = worker_type.strip() + + worker_config = WORKERS_CONFIG.get(worker_type) + if worker_config: + worker_config = worker_config.copy() + else: + log(worker_type + " is an unknown worker type! It will be ignored") + continue + + new_worker_count = worker_type_counter.setdefault(worker_type, 0) + 1 + worker_type_counter[worker_type] = new_worker_count + + # Name workers by their type concatenated with an incrementing number + # e.g. 
federation_reader1 + worker_name = worker_type + str(new_worker_count) + worker_config.update( + {"name": worker_name, "port": worker_port, "config_path": config_path} + ) + + # Update the shared config with any worker-type specific options + shared_config.update(worker_config["shared_extra_conf"]) + + # Check if more than one instance of this worker type has been specified + worker_type_total_count = worker_types.count(worker_type) + if worker_type_total_count > 1: + # Update the shared config with sharding-related options if necessary + add_sharding_to_shared_config( + shared_config, worker_type, worker_name, worker_port + ) + + # Enable the worker in supervisord + supervisord_config += SUPERVISORD_PROCESS_CONFIG_BLOCK.format_map(worker_config) + + # Add nginx location blocks for this worker's endpoints (if any are defined) + for pattern in worker_config["endpoint_patterns"]: + # Determine whether we need to load-balance this worker + if worker_type_total_count > 1: + # Create or add to a load-balanced upstream for this worker + nginx_upstreams.setdefault(worker_type, set()).add(worker_port) + + # Upstreams are named after the worker_type + upstream = "http://" + worker_type + else: + upstream = "http://localhost:%d" % (worker_port,) + + # Note that this endpoint should proxy to this upstream + nginx_locations[pattern] = upstream + + # Write out the worker's logging config file + + # Check whether we should write worker logs to disk, in addition to the console + extra_log_template_args = {} + if environ.get("SYNAPSE_WORKERS_WRITE_LOGS_TO_DISK"): + extra_log_template_args["LOG_FILE_PATH"] = "{dir}/logs/{name}.log".format( + dir=data_dir, name=worker_name + ) + + # Render and write the file + log_config_filepath = "/conf/workers/{name}.log.config".format(name=worker_name) + convert( + "/conf/log.config", + log_config_filepath, + worker_name=worker_name, + **extra_log_template_args, + ) + + # Then a worker config file + convert( + "/conf/worker.yaml.j2", + "/conf/workers/{name}.yaml".format(name=worker_name), + **worker_config, + worker_log_config_filepath=log_config_filepath, + ) + + worker_port += 1 + + # Build the nginx location config blocks + nginx_location_config = "" + for endpoint, upstream in nginx_locations.items(): + nginx_location_config += NGINX_LOCATION_CONFIG_BLOCK.format( + endpoint=endpoint, + upstream=upstream, + ) + + # Determine the load-balancing upstreams to configure + nginx_upstream_config = "" + for upstream_worker_type, upstream_worker_ports in nginx_upstreams.items(): + body = "" + for port in upstream_worker_ports: + body += " server localhost:%d;\n" % (port,) + + # Add to the list of configured upstreams + nginx_upstream_config += NGINX_UPSTREAM_CONFIG_BLOCK.format( + upstream_worker_type=upstream_worker_type, + body=body, + ) + + # Finally, we'll write out the config files. 
+ + # Shared homeserver config + convert( + "/conf/shared.yaml.j2", + "/conf/workers/shared.yaml", + shared_worker_config=yaml.dump(shared_config), + ) + + # Nginx config + convert( + "/conf/nginx.conf.j2", + "/etc/nginx/conf.d/matrix-synapse.conf", + worker_locations=nginx_location_config, + upstream_directives=nginx_upstream_config, + ) + + # Supervisord config + convert( + "/conf/supervisord.conf.j2", + "/etc/supervisor/conf.d/supervisord.conf", + main_config_path=config_path, + worker_config=supervisord_config, + ) + + # Ensure the logging directory exists + log_dir = data_dir + "/logs" + if not os.path.exists(log_dir): + os.mkdir(log_dir) + + +def start_supervisord(): + """Starts up supervisord which then starts and monitors all other necessary processes + + Raises: CalledProcessError if calling start.py return a non-zero exit code. + """ + subprocess.run(["/usr/bin/supervisord"], stdin=subprocess.PIPE) + + +def main(args, environ): + config_dir = environ.get("SYNAPSE_CONFIG_DIR", "/data") + config_path = environ.get("SYNAPSE_CONFIG_PATH", config_dir + "/homeserver.yaml") + data_dir = environ.get("SYNAPSE_DATA_DIR", "/data") + + # override SYNAPSE_NO_TLS, we don't support TLS in worker mode, + # this needs to be handled by a frontend proxy + environ["SYNAPSE_NO_TLS"] = "yes" + + # Generate the base homeserver config if one does not yet exist + if not os.path.exists(config_path): + log("Generating base homeserver config") + generate_base_homeserver_config() + + # This script may be run multiple times (mostly by Complement, see note at top of file). + # Don't re-configure workers in this instance. + mark_filepath = "/conf/workers_have_been_configured" + if not os.path.exists(mark_filepath): + # Always regenerate all other config files + generate_worker_files(environ, config_path, data_dir) + + # Mark workers as being configured + with open(mark_filepath, "w") as f: + f.write("") + + # Start supervisord, which will start Synapse, all of the configured worker + # processes, redis, nginx etc. according to the config we created above. + start_supervisord() + + +if __name__ == "__main__": + main(sys.argv, os.environ) diff --git a/docs/sample_config.yaml b/docs/sample_config.yaml index 6627fb2c7a..fcffb96fc7 100644 --- a/docs/sample_config.yaml +++ b/docs/sample_config.yaml @@ -1610,6 +1610,91 @@ account_validity: #invalid_token_html_path: "invalid_token.html" +## Account Validity ## + +# Optional account validity configuration. This allows for accounts to be denied +# any request after a given period. +# +# Once this feature is enabled, Synapse will look for registered users without an +# expiration date at startup and will add one to every account it found using the +# current settings at that time. +# This means that, if a validity period is set, and Synapse is restarted (it will +# then derive an expiration date from the current validity period), and some time +# after that the validity period changes and Synapse is restarted, the users' +# expiration dates won't be updated unless their account is manually renewed. This +# date will be randomly selected within a range [now + period - d ; now + period], +# where d is equal to 10% of the validity period. +# +account_validity: + # The account validity feature is disabled by default. Uncomment the + # following line to enable it. + # + #enabled: true + + # The period after which an account is valid after its registration. When + # renewing the account, its validity period will be extended by this amount + # of time. 
This parameter is required when using the account validity + # feature. + # + #period: 6w + + # The amount of time before an account's expiry date at which Synapse will + # send an email to the account's email address with a renewal link. By + # default, no such emails are sent. + # + # If you enable this setting, you will also need to fill out the 'email' and + # 'public_baseurl' configuration sections. + # + #renew_at: 1w + + # The subject of the email sent out with the renewal link. '%(app)s' can be + # used as a placeholder for the 'app_name' parameter from the 'email' + # section. + # + # Note that the placeholder must be written '%(app)s', including the + # trailing 's'. + # + # If this is not set, a default value is used. + # + #renew_email_subject: "Renew your %(app)s account" + + # Directory in which Synapse will try to find templates for the HTML files to + # serve to the user when trying to renew an account. If not set, default + # templates from within the Synapse package will be used. + # + # The currently available templates are: + # + # * account_renewed.html: Displayed to the user after they have successfully + # renewed their account. + # + # * account_previously_renewed.html: Displayed to the user if they attempt to + # renew their account with a token that is valid, but that has already + # been used. In this case the account is not renewed again. + # + # * invalid_token.html: Displayed to the user when they try to renew an account + # with an unknown or invalid renewal token. + # + # See https://github.com/matrix-org/synapse/tree/master/synapse/res/templates for + # default template contents. + # + # The file name of some of these templates can be configured below for legacy + # reasons. + # + #template_dir: "res/templates" + + # A custom file name for the 'account_renewed.html' template. + # + # If not set, the file is assumed to be named "account_renewed.html". + # + #account_renewed_html_path: "account_renewed.html" + + # A custom file name for the 'invalid_token.html' template. + # + # If not set, the file is assumed to be named "invalid_token.html". + # + #invalid_token_html_path: "invalid_token.html" + + ## Metrics ### # Enable collection and rendering of performance metrics @@ -2056,7 +2141,7 @@ saml2_config: # sub-properties: # # module: The class name of a custom mapping module. Default is -# 'synapse.handlers.oidc_handler.JinjaOidcMappingProvider'. +# 'synapse.handlers.oidc.JinjaOidcMappingProvider'. # See https://github.com/matrix-org/synapse/blob/master/docs/sso_mapping_providers.md#openid-mapping-providers # for information on implementing a custom mapping provider. # diff --git a/docs/sso_mapping_providers.md b/docs/sso_mapping_providers.md index e1d6ede7ba..50020d1a4a 100644 --- a/docs/sso_mapping_providers.md +++ b/docs/sso_mapping_providers.md @@ -106,7 +106,7 @@ A custom mapping provider must specify the following methods: Synapse has a built-in OpenID mapping provider if a custom provider isn't specified in the config. It is located at -[`synapse.handlers.oidc_handler.JinjaOidcMappingProvider`](../synapse/handlers/oidc_handler.py). +[`synapse.handlers.oidc.JinjaOidcMappingProvider`](../synapse/handlers/oidc.py). ## SAML Mapping Providers @@ -190,4 +190,4 @@ A custom mapping provider must specify the following methods: Synapse has a built-in SAML mapping provider if a custom provider isn't specified in the config. It is located at -[`synapse.handlers.saml_handler.DefaultSamlMappingProvider`](../synapse/handlers/saml_handler.py). 
+[`synapse.handlers.saml.DefaultSamlMappingProvider`](../synapse/handlers/saml.py). diff --git a/mypy.ini b/mypy.ini index 32e6197409..a40f705b76 100644 --- a/mypy.ini +++ b/mypy.ini @@ -41,7 +41,6 @@ files = synapse/push, synapse/replication, synapse/rest, - synapse/secrets.py, synapse/server.py, synapse/server_notices, synapse/spam_checker_api, diff --git a/scripts-dev/build_debian_packages b/scripts-dev/build_debian_packages index 3bb6e2c7ea..07d018db99 100755 --- a/scripts-dev/build_debian_packages +++ b/scripts-dev/build_debian_packages @@ -21,9 +21,10 @@ DISTS = ( "debian:buster", "debian:bullseye", "debian:sid", - "ubuntu:bionic", - "ubuntu:focal", - "ubuntu:groovy", + "ubuntu:bionic", # 18.04 LTS (our EOL forced by Py36 on 2021-12-23) + "ubuntu:focal", # 20.04 LTS (our EOL forced by Py38 on 2024-10-14) + "ubuntu:groovy", # 20.10 (EOL 2021-07-07) + "ubuntu:hirsute", # 21.04 (EOL 2022-01-05) ) DESC = '''\ diff --git a/scripts-dev/definitions.py b/scripts-dev/definitions.py index 313860df13..c82ddd9677 100755 --- a/scripts-dev/definitions.py +++ b/scripts-dev/definitions.py @@ -140,7 +140,7 @@ if __name__ == "__main__": definitions = {} for directory in args.directories: - for root, dirs, files in os.walk(directory): + for root, _, files in os.walk(directory): for filename in files: if filename.endswith(".py"): filepath = os.path.join(root, filename) diff --git a/scripts-dev/list_url_patterns.py b/scripts-dev/list_url_patterns.py index 26ad7c67f4..e85420dea8 100755 --- a/scripts-dev/list_url_patterns.py +++ b/scripts-dev/list_url_patterns.py @@ -48,7 +48,7 @@ args = parser.parse_args() for directory in args.directories: - for root, dirs, files in os.walk(directory): + for root, _, files in os.walk(directory): for filename in files: if filename.endswith(".py"): filepath = os.path.join(root, filename) diff --git a/scripts-dev/mypy_synapse_plugin.py b/scripts-dev/mypy_synapse_plugin.py index 18df68305b..1217e14874 100644 --- a/scripts-dev/mypy_synapse_plugin.py +++ b/scripts-dev/mypy_synapse_plugin.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2020 The Matrix.org Foundation C.I.C. # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/scripts-dev/sign_json b/scripts-dev/sign_json index 44553fb79a..4a43d3f2b0 100755 --- a/scripts-dev/sign_json +++ b/scripts-dev/sign_json @@ -1,6 +1,5 @@ #!/usr/bin/env python # -# -*- coding: utf-8 -*- # Copyright 2020 The Matrix.org Foundation C.I.C. # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/scripts-dev/update_database b/scripts-dev/update_database index 56365e2b58..87f709b6ed 100755 --- a/scripts-dev/update_database +++ b/scripts-dev/update_database @@ -1,5 +1,4 @@ #!/usr/bin/env python -# -*- coding: utf-8 -*- # Copyright 2019 The Matrix.org Foundation C.I.C. # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/scripts/export_signing_key b/scripts/export_signing_key index 8aec9d802b..0ed167ea85 100755 --- a/scripts/export_signing_key +++ b/scripts/export_signing_key @@ -1,5 +1,4 @@ #!/usr/bin/env python -# -*- coding: utf-8 -*- # Copyright 2019 The Matrix.org Foundation C.I.C. # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/scripts/generate_log_config b/scripts/generate_log_config index a13a5634a3..e72a0dafb7 100755 --- a/scripts/generate_log_config +++ b/scripts/generate_log_config @@ -1,6 +1,5 @@ #!/usr/bin/env python3 -# -*- coding: utf-8 -*- # Copyright 2020 The Matrix.org Foundation C.I.C. 
# # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/scripts/generate_signing_key.py b/scripts/generate_signing_key.py index 16d7c4f382..07df25a809 100755 --- a/scripts/generate_signing_key.py +++ b/scripts/generate_signing_key.py @@ -1,5 +1,4 @@ #!/usr/bin/env python -# -*- coding: utf-8 -*- # Copyright 2019 The Matrix.org Foundation C.I.C. # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/scripts/move_remote_media_to_new_store.py b/scripts/move_remote_media_to_new_store.py index 8477955a90..875aa4781f 100755 --- a/scripts/move_remote_media_to_new_store.py +++ b/scripts/move_remote_media_to_new_store.py @@ -1,5 +1,4 @@ #!/usr/bin/env python -# -*- coding: utf-8 -*- # Copyright 2017 New Vector Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/scripts/register_new_matrix_user b/scripts/register_new_matrix_user index 8b9d30877d..00104b9d62 100755 --- a/scripts/register_new_matrix_user +++ b/scripts/register_new_matrix_user @@ -1,5 +1,4 @@ #!/usr/bin/env python -# -*- coding: utf-8 -*- # Copyright 2015, 2016 OpenMarket Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/scripts/synapse_port_db b/scripts/synapse_port_db index bc1a989fc1..aab2a06af0 100755 --- a/scripts/synapse_port_db +++ b/scripts/synapse_port_db @@ -1,5 +1,4 @@ #!/usr/bin/env python -# -*- coding: utf-8 -*- # Copyright 2015, 2016 OpenMarket Ltd # Copyright 2018 New Vector Ltd # Copyright 2019 The Matrix.org Foundation C.I.C. @@ -637,8 +636,11 @@ class Porter(object): "device_inbox_sequence", ("device_inbox", "device_federation_outbox") ) await self._setup_sequence( - "account_data_sequence", ("room_account_data", "room_tags_revisions", "account_data")) - await self._setup_sequence("receipts_sequence", ("receipts_linearized", )) + "account_data_sequence", + ("room_account_data", "room_tags_revisions", "account_data"), + ) + await self._setup_sequence("receipts_sequence", ("receipts_linearized",)) + await self._setup_sequence("presence_stream_sequence", ("presence_stream",)) await self._setup_auth_chain_sequence() # Step 3. Get tables. diff --git a/setup.cfg b/setup.cfg index 33601b71d5..e5ceb7ed19 100644 --- a/setup.cfg +++ b/setup.cfg @@ -18,8 +18,7 @@ ignore = # E203: whitespace before ':' (which is contrary to pep8?) # E731: do not assign a lambda expression, use a def # E501: Line too long (black enforces this for us) -# B007: Subsection of the bugbear suite (TODO: add in remaining fixes) -ignore=W503,W504,E203,E731,E501,B007 +ignore=W503,W504,E203,E731,E501 [isort] line_length = 88 diff --git a/stubs/frozendict.pyi b/stubs/frozendict.pyi index 0368ba4703..24c6f3af77 100644 --- a/stubs/frozendict.pyi +++ b/stubs/frozendict.pyi @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2020 The Matrix.org Foundation C.I.C. # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/stubs/txredisapi.pyi b/stubs/txredisapi.pyi index 080ca40287..c1a06ae022 100644 --- a/stubs/txredisapi.pyi +++ b/stubs/txredisapi.pyi @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2020 The Matrix.org Foundation C.I.C. 
# # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/__init__.py b/synapse/__init__.py index 781f5ac3a2..5eac40730a 100644 --- a/synapse/__init__.py +++ b/synapse/__init__.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2014-2016 OpenMarket Ltd # Copyright 2018-9 New Vector Ltd # @@ -22,8 +21,8 @@ import os import sys # Check that we're not running on an unsupported Python version. -if sys.version_info < (3, 5): - print("Synapse requires Python 3.5 or above.") +if sys.version_info < (3, 6): + print("Synapse requires Python 3.6 or above.") sys.exit(1) # Twisted and canonicaljson will fail to import when this file is executed to @@ -48,7 +47,7 @@ try: except ImportError: pass -__version__ = "1.32.2" +__version__ = "1.33.0" if bool(os.environ.get("SYNAPSE_TEST_PATCH_LOG_CONTEXTS", False)): # We import here so that we don't have to install a bunch of deps when diff --git a/synapse/_scripts/register_new_matrix_user.py b/synapse/_scripts/register_new_matrix_user.py index dfe26dea6d..dae986c788 100644 --- a/synapse/_scripts/register_new_matrix_user.py +++ b/synapse/_scripts/register_new_matrix_user.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2015, 2016 OpenMarket Ltd # Copyright 2018 New Vector # diff --git a/synapse/api/__init__.py b/synapse/api/__init__.py index bfebb0f644..5e83dba2ed 100644 --- a/synapse/api/__init__.py +++ b/synapse/api/__init__.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2014-2016 OpenMarket Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/api/auth.py b/synapse/api/auth.py index 26cb1bc657..c3c776a9f9 100644 --- a/synapse/api/auth.py +++ b/synapse/api/auth.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2014 - 2016 OpenMarket Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -13,14 +12,13 @@ # See the License for the specific language governing permissions and # limitations under the License. import logging -from typing import List, Optional, Tuple +from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple import pymacaroons from netaddr import IPAddress from twisted.web.server import Request -import synapse.types from synapse import event_auth from synapse.api.auth_blocking import AuthBlocking from synapse.api.constants import EventTypes, HistoryVisibility, Membership @@ -37,11 +35,14 @@ from synapse.http import get_request_user_agent from synapse.http.site import SynapseRequest from synapse.logging import opentracing as opentracing from synapse.storage.databases.main.registration import TokenLookupResult -from synapse.types import StateMap, UserID +from synapse.types import Requester, StateMap, UserID, create_requester from synapse.util.caches.lrucache import LruCache from synapse.util.macaroons import get_value_from_macaroon, satisfy_expiry from synapse.util.metrics import Measure +if TYPE_CHECKING: + from synapse.server import HomeServer + logger = logging.getLogger(__name__) @@ -66,9 +67,10 @@ class Auth: """ FIXME: This class contains a mix of functions for authenticating users of our client-server API and authenticating events added to room graphs. + The latter should be moved to synapse.handlers.event_auth.EventAuthHandler. 
""" - def __init__(self, hs): + def __init__(self, hs: "HomeServer"): self.hs = hs self.clock = hs.get_clock() self.store = hs.get_datastore() @@ -80,19 +82,21 @@ class Auth: self._auth_blocking = AuthBlocking(self.hs) - self._account_validity_enabled = hs.config.account_validity_enabled + self._account_validity_enabled = ( + hs.config.account_validity.account_validity_enabled + ) self._track_appservice_user_ips = hs.config.track_appservice_user_ips self._macaroon_secret_key = hs.config.macaroon_secret_key async def check_from_context( self, room_version: str, event, context, do_sig_check=True - ): + ) -> None: prev_state_ids = await context.get_prev_state_ids() auth_events_ids = self.compute_auth_events( event, prev_state_ids, for_verification=True ) - auth_events = await self.store.get_events(auth_events_ids) - auth_events = {(e.type, e.state_key): e for e in auth_events.values()} + auth_events_by_id = await self.store.get_events(auth_events_ids) + auth_events = {(e.type, e.state_key): e for e in auth_events_by_id.values()} room_version_obj = KNOWN_ROOM_VERSIONS[room_version] event_auth.check( @@ -149,17 +153,11 @@ class Auth: raise AuthError(403, "User %s not in room %s" % (user_id, room_id)) - async def check_host_in_room(self, room_id, host): + async def check_host_in_room(self, room_id: str, host: str) -> bool: with Measure(self.clock, "check_host_in_room"): - latest_event_ids = await self.store.is_host_joined(room_id, host) - return latest_event_ids - - def can_federate(self, event, auth_events): - creation_event = auth_events.get((EventTypes.Create, "")) + return await self.store.is_host_joined(room_id, host) - return creation_event.content.get("m.federate", True) is True - - def get_public_keys(self, invite_event): + def get_public_keys(self, invite_event: EventBase) -> List[Dict[str, Any]]: return event_auth.get_public_keys(invite_event) async def get_user_by_req( @@ -168,7 +166,7 @@ class Auth: allow_guest: bool = False, rights: str = "access", allow_expired: bool = False, - ) -> synapse.types.Requester: + ) -> Requester: """Get a registered user's ID. Args: @@ -193,8 +191,8 @@ class Auth: access_token = self.get_access_token_from_request(request) - user_id, app_service = self._get_appservice_user_id(request) - if user_id: + user_id, app_service = await self._get_appservice_user_id(request) + if user_id and app_service: if ip_addr and self._track_appservice_user_ips: await self.store.insert_client_ip( user_id=user_id, @@ -204,9 +202,7 @@ class Auth: device_id="dummy-device", # stubbed ) - requester = synapse.types.create_requester( - user_id, app_service=app_service - ) + requester = create_requester(user_id, app_service=app_service) request.requester = user_id opentracing.set_tag("authenticated_entity", user_id) @@ -249,7 +245,7 @@ class Auth: errcode=Codes.GUEST_ACCESS_FORBIDDEN, ) - requester = synapse.types.create_requester( + requester = create_requester( user_info.user_id, token_id, is_guest, @@ -269,7 +265,9 @@ class Auth: except KeyError: raise MissingClientTokenError() - def _get_appservice_user_id(self, request): + async def _get_appservice_user_id( + self, request: Request + ) -> Tuple[Optional[str], Optional[ApplicationService]]: app_service = self.store.get_app_service_by_token( self.get_access_token_from_request(request) ) @@ -282,6 +280,9 @@ class Auth: if ip_address not in app_service.ip_range_whitelist: return None, None + # This will always be set by the time Twisted calls us. 
+ assert request.args is not None + if b"user_id" not in request.args: return app_service.sender, app_service @@ -390,7 +391,9 @@ class Auth: logger.warning("Invalid macaroon in auth: %s %s", type(e), e) raise InvalidClientTokenError("Invalid macaroon passed.") - def _parse_and_validate_macaroon(self, token, rights="access"): + def _parse_and_validate_macaroon( + self, token: str, rights: str = "access" + ) -> Tuple[str, bool]: """Takes a macaroon and tries to parse and validate it. This is cached if and only if rights == access and there isn't an expiry. @@ -435,15 +438,16 @@ class Auth: return user_id, guest - def validate_macaroon(self, macaroon, type_string, user_id): + def validate_macaroon( + self, macaroon: pymacaroons.Macaroon, type_string: str, user_id: str + ) -> None: """ validate that a Macaroon is understood by and was signed by this server. Args: - macaroon(pymacaroons.Macaroon): The macaroon to validate - type_string(str): The kind of token required (e.g. "access", - "delete_pusher") - user_id (str): The user_id required + macaroon: The macaroon to validate + type_string: The kind of token required (e.g. "access", "delete_pusher") + user_id: The user_id required """ v = pymacaroons.Verifier() @@ -468,9 +472,7 @@ class Auth: if not service: logger.warning("Unrecognised appservice access token.") raise InvalidClientTokenError() - request.requester = synapse.types.create_requester( - service.sender, app_service=service - ) + request.requester = create_requester(service.sender, app_service=service) return service async def is_server_admin(self, user: UserID) -> bool: @@ -522,7 +524,7 @@ class Auth: return auth_ids - async def check_can_change_room_list(self, room_id: str, user: UserID): + async def check_can_change_room_list(self, room_id: str, user: UserID) -> bool: """Determine whether the user is allowed to edit the room's entry in the published room list. @@ -557,11 +559,11 @@ class Auth: return user_level >= send_level @staticmethod - def has_access_token(request: Request): + def has_access_token(request: Request) -> bool: """Checks if the request has an access_token. Returns: - bool: False if no access_token was given, True otherwise. + False if no access_token was given, True otherwise. """ # This will always be set by the time Twisted calls us. assert request.args is not None @@ -571,13 +573,13 @@ class Auth: return bool(query_params) or bool(auth_headers) @staticmethod - def get_access_token_from_request(request: Request): + def get_access_token_from_request(request: Request) -> str: """Extracts the access_token from the request. Args: request: The http request. Returns: - unicode: The access_token + The access_token Raises: MissingClientTokenError: If there isn't a single access_token in the request @@ -652,5 +654,5 @@ class Auth: % (user_id, room_id), ) - def check_auth_blocking(self, *args, **kwargs): - return self._auth_blocking.check_auth_blocking(*args, **kwargs) + async def check_auth_blocking(self, *args, **kwargs) -> None: + await self._auth_blocking.check_auth_blocking(*args, **kwargs) diff --git a/synapse/api/auth_blocking.py b/synapse/api/auth_blocking.py index d8088f524a..e6bced93d5 100644 --- a/synapse/api/auth_blocking.py +++ b/synapse/api/auth_blocking.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2020 The Matrix.org Foundation C.I.C. # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,18 +13,21 @@ # limitations under the License. 
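The `validate_macaroon` hunk above documents that the method checks a macaroon's first-party caveats (generation, token type, user id) and its signature against the server's secret. Below is a generic pymacaroons sketch of that kind of check; the location, caveat strings and key are made up for illustration and are not Synapse's exact internals.

```python
# Generic sketch of a caveat-and-signature check in the style of
# Auth.validate_macaroon above. All concrete values here are hypothetical.
import pymacaroons

key = "server-secret-key"
macaroon = pymacaroons.Macaroon(
    location="example.com", identifier="key1", key=key
)
macaroon.add_first_party_caveat("gen = 1")
macaroon.add_first_party_caveat("type = access")
macaroon.add_first_party_caveat("user_id = @alice:example.com")

v = pymacaroons.Verifier()
v.satisfy_exact("gen = 1")
v.satisfy_exact("type = access")
v.satisfy_exact("user_id = @alice:example.com")
# Accept any time-based caveat, for the purposes of this sketch only.
v.satisfy_general(lambda caveat: caveat.startswith("time < "))

print(v.verify(macaroon, key))  # True if every caveat is satisfied
```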
import logging -from typing import Optional +from typing import TYPE_CHECKING, Optional from synapse.api.constants import LimitBlockingTypes, UserTypes from synapse.api.errors import Codes, ResourceLimitError from synapse.config.server import is_threepid_reserved from synapse.types import Requester +if TYPE_CHECKING: + from synapse.server import HomeServer + logger = logging.getLogger(__name__) class AuthBlocking: - def __init__(self, hs): + def __init__(self, hs: "HomeServer"): self.store = hs.get_datastore() self._server_notices_mxid = hs.config.server_notices_mxid @@ -44,7 +46,7 @@ class AuthBlocking: threepid: Optional[dict] = None, user_type: Optional[str] = None, requester: Optional[Requester] = None, - ): + ) -> None: """Checks if the user should be rejected for some external reason, such as monthly active user limiting or global disable flag diff --git a/synapse/api/constants.py b/synapse/api/constants.py index a8ae41de48..936b6534b4 100644 --- a/synapse/api/constants.py +++ b/synapse/api/constants.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2014-2016 OpenMarket Ltd # Copyright 2017 Vector Creations Ltd # Copyright 2018-2019 New Vector Ltd @@ -18,6 +17,9 @@ """Contains constants from the specification.""" +# the max size of a (canonical-json-encoded) event +MAX_PDU_SIZE = 65536 + # the "depth" field on events is limited to 2**63 - 1 MAX_DEPTH = 2 ** 63 - 1 diff --git a/synapse/api/errors.py b/synapse/api/errors.py index a71e518f90..f21443bc76 100644 --- a/synapse/api/errors.py +++ b/synapse/api/errors.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2014-2016 OpenMarket Ltd # Copyright 2017-2018 New Vector Ltd # Copyright 2019 The Matrix.org Foundation C.I.C. diff --git a/synapse/api/filtering.py b/synapse/api/filtering.py index 5caf336fd0..ce49a0ad58 100644 --- a/synapse/api/filtering.py +++ b/synapse/api/filtering.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2015, 2016 OpenMarket Ltd # Copyright 2017 Vector Creations Ltd # Copyright 2018-2019 New Vector Ltd diff --git a/synapse/api/presence.py b/synapse/api/presence.py index b9a8e29460..a3bf0348d1 100644 --- a/synapse/api/presence.py +++ b/synapse/api/presence.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2014-2016 OpenMarket Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/api/room_versions.py b/synapse/api/room_versions.py index 4a4ad4c9bb..2c0ead80f5 100644 --- a/synapse/api/room_versions.py +++ b/synapse/api/room_versions.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2019 New Vector Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/api/urls.py b/synapse/api/urls.py index 6379c86dde..4b1f213c75 100644 --- a/synapse/api/urls.py +++ b/synapse/api/urls.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2014-2016 OpenMarket Ltd # Copyright 2018 New Vector Ltd # diff --git a/synapse/app/__init__.py b/synapse/app/__init__.py index d1a2cd5e19..f9940491e8 100644 --- a/synapse/app/__init__.py +++ b/synapse/app/__init__.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2014-2016 OpenMarket Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/app/_base.py b/synapse/app/_base.py index 3912c8994c..638e01c1b2 100644 --- a/synapse/app/_base.py +++ b/synapse/app/_base.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2017 New Vector Ltd # Copyright 2019-2021 The Matrix.org Foundation C.I.C # @@ -31,9 +30,10 @@ from twisted.internet import defer, error, 
reactor from twisted.protocols.tls import TLSMemoryBIOFactory import synapse +from synapse.api.constants import MAX_PDU_SIZE from synapse.app import check_bind_error from synapse.app.phone_stats_home import start_phone_stats_home -from synapse.config.server import ListenerConfig +from synapse.config.homeserver import HomeServerConfig from synapse.crypto import context_factory from synapse.logging.context import PreserveLoggingContext from synapse.metrics.background_process_metrics import wrap_as_background_process @@ -289,7 +289,7 @@ def refresh_certificate(hs): logger.info("Context factories updated.") -async def start(hs: "synapse.server.HomeServer", listeners: Iterable[ListenerConfig]): +async def start(hs: "synapse.server.HomeServer"): """ Start a Synapse server or worker. @@ -301,7 +301,6 @@ async def start(hs: "synapse.server.HomeServer", listeners: Iterable[ListenerCon Args: hs: homeserver instance - listeners: Listener configuration ('listeners' in homeserver.yaml) """ # Set up the SIGHUP machinery. if hasattr(signal, "SIGHUP"): @@ -337,7 +336,7 @@ async def start(hs: "synapse.server.HomeServer", listeners: Iterable[ListenerCon synapse.logging.opentracing.init_tracer(hs) # type: ignore[attr-defined] # noqa # It is now safe to start your Synapse. - hs.start_listening(listeners) + hs.start_listening() hs.get_datastore().db_pool.start_profiling() hs.get_pusherpool().start() @@ -531,3 +530,25 @@ def sdnotify(state): # this is a bit surprising, since we don't expect to have a NOTIFY_SOCKET # unless systemd is expecting us to notify it. logger.warning("Unable to send notification to systemd: %s", e) + + +def max_request_body_size(config: HomeServerConfig) -> int: + """Get a suitable maximum size for incoming HTTP requests""" + + # Other than media uploads, the biggest request we expect to see is a fully-loaded + # /federation/v1/send request. + # + # The main thing in such a request is up to 50 PDUs, and up to 100 EDUs. PDUs are + # limited to 65536 bytes (possibly slightly more if the sender didn't use canonical + # json encoding); there is no specced limit to EDUs (see + # https://github.com/matrix-org/matrix-doc/issues/3121). + # + # in short, we somewhat arbitrarily limit requests to 200 * 64K (about 12.5M) + # + max_request_size = 200 * MAX_PDU_SIZE + + # if we have a media repo enabled, we may need to allow larger uploads than that + if config.media.can_load_media_repo: + max_request_size = max(max_request_size, config.media.max_upload_size) + + return max_request_size diff --git a/synapse/app/admin_cmd.py b/synapse/app/admin_cmd.py index 9f99651aa2..68ae19c977 100644 --- a/synapse/app/admin_cmd.py +++ b/synapse/app/admin_cmd.py @@ -1,5 +1,4 @@ #!/usr/bin/env python -# -*- coding: utf-8 -*- # Copyright 2019 Matrix.org Foundation C.I.C. # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -71,12 +70,6 @@ class AdminCmdSlavedStore( class AdminCmdServer(HomeServer): DATASTORE_CLASS = AdminCmdSlavedStore - def _listen_http(self, listener_config): - pass - - def start_listening(self, listeners): - pass - async def export_data_command(hs, args): """Export data for a user. 
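The `max_request_body_size` helper added to `synapse/app/_base.py` above derives the HTTP request-body cap from `MAX_PDU_SIZE`. Here is a minimal, self-contained sketch of that calculation; it takes plain arguments instead of the real `HomeServerConfig`, so the parameter names are illustrative only.

```python
# Illustrative sketch of the request-body cap computed in synapse/app/_base.py.
# The real helper reads these values from HomeServerConfig; plain arguments are
# used here so the snippet runs on its own.

MAX_PDU_SIZE = 65536  # max size of a (canonical-json-encoded) event, in bytes


def max_request_body_size(media_repo_enabled: bool, max_upload_size: int) -> int:
    """Return a suitable maximum size for incoming HTTP requests.

    A fully-loaded /federation/v1/send request carries up to 50 PDUs and 100
    EDUs, so requests are (somewhat arbitrarily) capped at 200 * 64 KiB,
    roughly 12.5 MiB.
    """
    max_request_size = 200 * MAX_PDU_SIZE  # 13,107,200 bytes

    # A process serving the media repository may need to accept larger uploads.
    if media_repo_enabled:
        max_request_size = max(max_request_size, max_upload_size)

    return max_request_size


print(max_request_body_size(False, 0))                # 13107200
print(max_request_body_size(True, 50 * 1024 * 1024))  # 52428800
```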
@@ -233,7 +226,7 @@ def start(config_options): async def run(): with LoggingContext("command"): - _base.start(ss, []) + _base.start(ss) await args.func(ss, args) _base.start_worker_reactor( diff --git a/synapse/app/appservice.py b/synapse/app/appservice.py index add43147b3..2d50060ffb 100644 --- a/synapse/app/appservice.py +++ b/synapse/app/appservice.py @@ -1,5 +1,4 @@ #!/usr/bin/env python -# -*- coding: utf-8 -*- # Copyright 2016 OpenMarket Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/app/client_reader.py b/synapse/app/client_reader.py index add43147b3..2d50060ffb 100644 --- a/synapse/app/client_reader.py +++ b/synapse/app/client_reader.py @@ -1,5 +1,4 @@ #!/usr/bin/env python -# -*- coding: utf-8 -*- # Copyright 2016 OpenMarket Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/app/event_creator.py b/synapse/app/event_creator.py index e9c098c4e7..57af28f10a 100644 --- a/synapse/app/event_creator.py +++ b/synapse/app/event_creator.py @@ -1,5 +1,4 @@ #!/usr/bin/env python -# -*- coding: utf-8 -*- # Copyright 2018 New Vector Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/app/federation_reader.py b/synapse/app/federation_reader.py index add43147b3..2d50060ffb 100644 --- a/synapse/app/federation_reader.py +++ b/synapse/app/federation_reader.py @@ -1,5 +1,4 @@ #!/usr/bin/env python -# -*- coding: utf-8 -*- # Copyright 2016 OpenMarket Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/app/federation_sender.py b/synapse/app/federation_sender.py index add43147b3..2d50060ffb 100644 --- a/synapse/app/federation_sender.py +++ b/synapse/app/federation_sender.py @@ -1,5 +1,4 @@ #!/usr/bin/env python -# -*- coding: utf-8 -*- # Copyright 2016 OpenMarket Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/app/frontend_proxy.py b/synapse/app/frontend_proxy.py index add43147b3..2d50060ffb 100644 --- a/synapse/app/frontend_proxy.py +++ b/synapse/app/frontend_proxy.py @@ -1,5 +1,4 @@ #!/usr/bin/env python -# -*- coding: utf-8 -*- # Copyright 2016 OpenMarket Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/app/generic_worker.py b/synapse/app/generic_worker.py index 70ce8a8988..1a15ceee81 100644 --- a/synapse/app/generic_worker.py +++ b/synapse/app/generic_worker.py @@ -1,5 +1,4 @@ #!/usr/bin/env python -# -*- coding: utf-8 -*- # Copyright 2016 OpenMarket Ltd # Copyright 2020 The Matrix.org Foundation C.I.C. # @@ -14,12 +13,9 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
-import contextlib import logging import sys -from typing import Dict, Iterable, Optional, Set - -from typing_extensions import ContextManager +from typing import Dict, Optional from twisted.internet import address from twisted.web.resource import IResource @@ -36,29 +32,18 @@ from synapse.api.urls import ( SERVER_KEY_V2_PREFIX, ) from synapse.app import _base -from synapse.app._base import register_start +from synapse.app._base import max_request_body_size, register_start from synapse.config._base import ConfigError from synapse.config.homeserver import HomeServerConfig from synapse.config.logger import setup_logging from synapse.config.server import ListenerConfig -from synapse.federation import send_queue from synapse.federation.transport.server import TransportLayerServer -from synapse.handlers.presence import ( - BasePresenceHandler, - PresenceState, - get_interested_parties, -) from synapse.http.server import JsonResource, OptionsResource from synapse.http.servlet import RestServlet, parse_json_object_from_request from synapse.http.site import SynapseSite from synapse.logging.context import LoggingContext from synapse.metrics import METRICS_PREFIX, MetricsResource, RegistryProxy -from synapse.metrics.background_process_metrics import run_as_background_process from synapse.replication.http import REPLICATION_PREFIX, ReplicationRestResource -from synapse.replication.http.presence import ( - ReplicationBumpPresenceActiveTime, - ReplicationPresenceSetState, -) from synapse.replication.slave.storage._base import BaseSlavedStore from synapse.replication.slave.storage.account_data import SlavedAccountDataStore from synapse.replication.slave.storage.appservice import SlavedApplicationServiceStore @@ -70,7 +55,6 @@ from synapse.replication.slave.storage.events import SlavedEventStore from synapse.replication.slave.storage.filtering import SlavedFilteringStore from synapse.replication.slave.storage.groups import SlavedGroupServerStore from synapse.replication.slave.storage.keys import SlavedKeyStore -from synapse.replication.slave.storage.presence import SlavedPresenceStore from synapse.replication.slave.storage.profile import SlavedProfileStore from synapse.replication.slave.storage.push_rule import SlavedPushRuleStore from synapse.replication.slave.storage.pushers import SlavedPusherStore @@ -78,21 +62,8 @@ from synapse.replication.slave.storage.receipts import SlavedReceiptsStore from synapse.replication.slave.storage.registration import SlavedRegistrationStore from synapse.replication.slave.storage.room import RoomStore from synapse.replication.slave.storage.transactions import SlavedTransactionStore -from synapse.replication.tcp.client import ReplicationDataHandler -from synapse.replication.tcp.commands import ClearUserSyncsCommand -from synapse.replication.tcp.streams import ( - AccountDataStream, - DeviceListsStream, - GroupServerStream, - PresenceStream, - PushersStream, - PushRulesStream, - ReceiptsStream, - TagAccountDataStream, - ToDeviceStream, -) from synapse.rest.admin import register_servlets_for_media_repo -from synapse.rest.client.v1 import events, login, room +from synapse.rest.client.v1 import events, login, presence, room from synapse.rest.client.v1.initial_sync import InitialSyncRestServlet from synapse.rest.client.v1.profile import ( ProfileAvatarURLRestServlet, @@ -129,7 +100,7 @@ from synapse.rest.client.versions import VersionsRestServlet from synapse.rest.health import HealthResource from synapse.rest.key.v2 import KeyApiV2Resource from synapse.rest.synapse.client 
import build_synapse_client_resource_tree -from synapse.server import HomeServer, cache_in_self +from synapse.server import HomeServer from synapse.storage.databases.main.censor_events import CensorEventsStore from synapse.storage.databases.main.client_ips import ClientIpWorkerStore from synapse.storage.databases.main.e2e_room_keys import EndToEndRoomKeyStore @@ -138,40 +109,18 @@ from synapse.storage.databases.main.metrics import ServerMetricsStore from synapse.storage.databases.main.monthly_active_users import ( MonthlyActiveUsersWorkerStore, ) -from synapse.storage.databases.main.presence import UserPresenceState +from synapse.storage.databases.main.presence import PresenceStore from synapse.storage.databases.main.search import SearchWorkerStore from synapse.storage.databases.main.stats import StatsStore from synapse.storage.databases.main.transactions import TransactionWorkerStore from synapse.storage.databases.main.ui_auth import UIAuthWorkerStore from synapse.storage.databases.main.user_directory import UserDirectoryStore -from synapse.types import ReadReceipt -from synapse.util.async_helpers import Linearizer from synapse.util.httpresourcetree import create_resource_tree from synapse.util.versionstring import get_version_string logger = logging.getLogger("synapse.app.generic_worker") -class PresenceStatusStubServlet(RestServlet): - """If presence is disabled this servlet can be used to stub out setting - presence status. - """ - - PATTERNS = client_patterns("/presence/(?P<user_id>[^/]*)/status") - - def __init__(self, hs): - super().__init__() - self.auth = hs.get_auth() - - async def on_GET(self, request, user_id): - await self.auth.get_user_by_req(request) - return 200, {"presence": "offline", "user_id": user_id} - - async def on_PUT(self, request, user_id): - await self.auth.get_user_by_req(request) - return 200, {} - - class KeyUploadServlet(RestServlet): """An implementation of the `KeyUploadServlet` that responds to read only requests, but otherwise proxies through to the master instance. @@ -265,214 +214,6 @@ class KeyUploadServlet(RestServlet): return 200, {"one_time_key_counts": result} -class _NullContextManager(ContextManager[None]): - """A context manager which does nothing.""" - - def __exit__(self, exc_type, exc_val, exc_tb): - pass - - -UPDATE_SYNCING_USERS_MS = 10 * 1000 - - -class GenericWorkerPresence(BasePresenceHandler): - def __init__(self, hs): - super().__init__(hs) - self.hs = hs - self.is_mine_id = hs.is_mine_id - - self.presence_router = hs.get_presence_router() - self._presence_enabled = hs.config.use_presence - - # The number of ongoing syncs on this process, by user id. - # Empty if _presence_enabled is false. - self._user_to_num_current_syncs = {} # type: Dict[str, int] - - self.notifier = hs.get_notifier() - self.instance_id = hs.get_instance_id() - - # user_id -> last_sync_ms. 
Lists the users that have stopped syncing - # but we haven't notified the master of that yet - self.users_going_offline = {} - - self._bump_active_client = ReplicationBumpPresenceActiveTime.make_client(hs) - self._set_state_client = ReplicationPresenceSetState.make_client(hs) - - self._send_stop_syncing_loop = self.clock.looping_call( - self.send_stop_syncing, UPDATE_SYNCING_USERS_MS - ) - - self._busy_presence_enabled = hs.config.experimental.msc3026_enabled - - hs.get_reactor().addSystemEventTrigger( - "before", - "shutdown", - run_as_background_process, - "generic_presence.on_shutdown", - self._on_shutdown, - ) - - def _on_shutdown(self): - if self._presence_enabled: - self.hs.get_tcp_replication().send_command( - ClearUserSyncsCommand(self.instance_id) - ) - - def send_user_sync(self, user_id, is_syncing, last_sync_ms): - if self._presence_enabled: - self.hs.get_tcp_replication().send_user_sync( - self.instance_id, user_id, is_syncing, last_sync_ms - ) - - def mark_as_coming_online(self, user_id): - """A user has started syncing. Send a UserSync to the master, unless they - had recently stopped syncing. - - Args: - user_id (str) - """ - going_offline = self.users_going_offline.pop(user_id, None) - if not going_offline: - # Safe to skip because we haven't yet told the master they were offline - self.send_user_sync(user_id, True, self.clock.time_msec()) - - def mark_as_going_offline(self, user_id): - """A user has stopped syncing. We wait before notifying the master as - its likely they'll come back soon. This allows us to avoid sending - a stopped syncing immediately followed by a started syncing notification - to the master - - Args: - user_id (str) - """ - self.users_going_offline[user_id] = self.clock.time_msec() - - def send_stop_syncing(self): - """Check if there are any users who have stopped syncing a while ago - and haven't come back yet. If there are poke the master about them. - """ - now = self.clock.time_msec() - for user_id, last_sync_ms in list(self.users_going_offline.items()): - if now - last_sync_ms > UPDATE_SYNCING_USERS_MS: - self.users_going_offline.pop(user_id, None) - self.send_user_sync(user_id, False, last_sync_ms) - - async def user_syncing( - self, user_id: str, affect_presence: bool - ) -> ContextManager[None]: - """Record that a user is syncing. - - Called by the sync and events servlets to record that a user has connected to - this worker and is waiting for some events. - """ - if not affect_presence or not self._presence_enabled: - return _NullContextManager() - - curr_sync = self._user_to_num_current_syncs.get(user_id, 0) - self._user_to_num_current_syncs[user_id] = curr_sync + 1 - - # If we went from no in flight sync to some, notify replication - if self._user_to_num_current_syncs[user_id] == 1: - self.mark_as_coming_online(user_id) - - def _end(): - # We check that the user_id is in user_to_num_current_syncs because - # user_to_num_current_syncs may have been cleared if we are - # shutting down. 
- if user_id in self._user_to_num_current_syncs: - self._user_to_num_current_syncs[user_id] -= 1 - - # If we went from one in flight sync to non, notify replication - if self._user_to_num_current_syncs[user_id] == 0: - self.mark_as_going_offline(user_id) - - @contextlib.contextmanager - def _user_syncing(): - try: - yield - finally: - _end() - - return _user_syncing() - - async def notify_from_replication(self, states, stream_id): - parties = await get_interested_parties(self.store, self.presence_router, states) - room_ids_to_states, users_to_states = parties - - self.notifier.on_new_event( - "presence_key", - stream_id, - rooms=room_ids_to_states.keys(), - users=users_to_states.keys(), - ) - - async def process_replication_rows(self, token, rows): - states = [ - UserPresenceState( - row.user_id, - row.state, - row.last_active_ts, - row.last_federation_update_ts, - row.last_user_sync_ts, - row.status_msg, - row.currently_active, - ) - for row in rows - ] - - for state in states: - self.user_to_current_state[state.user_id] = state - - stream_id = token - await self.notify_from_replication(states, stream_id) - - def get_currently_syncing_users_for_replication(self) -> Iterable[str]: - return [ - user_id - for user_id, count in self._user_to_num_current_syncs.items() - if count > 0 - ] - - async def set_state(self, target_user, state, ignore_status_msg=False): - """Set the presence state of the user.""" - presence = state["presence"] - - valid_presence = ( - PresenceState.ONLINE, - PresenceState.UNAVAILABLE, - PresenceState.OFFLINE, - PresenceState.BUSY, - ) - - if presence not in valid_presence or ( - presence == PresenceState.BUSY and not self._busy_presence_enabled - ): - raise SynapseError(400, "Invalid presence state") - - user_id = target_user.to_string() - - # If presence is disabled, no-op - if not self.hs.config.use_presence: - return - - # Proxy request to master - await self._set_state_client( - user_id=user_id, state=state, ignore_status_msg=ignore_status_msg - ) - - async def bump_presence_active_time(self, user): - """We've seen the user do something that indicates they're interacting - with the app. - """ - # If presence is disabled, no-op - if not self.hs.config.use_presence: - return - - # Proxy request to master - user_id = user.to_string() - await self._bump_active_client(user_id=user_id) - - class GenericWorkerSlavedStore( # FIXME(#3714): We need to add UserDirectoryStore as we write directly # rather than going via the correct worker. 
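The worker presence code removed above (now handled by the main presence handler) relies on a simple debounce: a user who stops syncing is not reported offline to the master straight away, in case they reconnect within the grace period. The sketch below reproduces just that debounce pattern with an injected clock; the class and method names are illustrative, not Synapse APIs.

```python
# Debounce pattern from the removed GenericWorkerPresence code: only report a
# user as no longer syncing if they have not come back within the grace period.
import time
from typing import Callable, Dict

UPDATE_SYNCING_USERS_MS = 10 * 1000  # grace period / flush interval, as above


class SyncDebouncer:
    def __init__(
        self,
        send_user_sync: Callable[[str, bool, int], None],
        clock_ms: Callable[[], int] = lambda: int(time.time() * 1000),
    ):
        self._send_user_sync = send_user_sync
        self._clock_ms = clock_ms
        # user_id -> last_sync_ms for users not yet reported as offline
        self._users_going_offline: Dict[str, int] = {}

    def mark_as_coming_online(self, user_id: str) -> None:
        going_offline = self._users_going_offline.pop(user_id, None)
        if not going_offline:
            # Only notify if we had not quietly marked them as going offline.
            self._send_user_sync(user_id, True, self._clock_ms())

    def mark_as_going_offline(self, user_id: str) -> None:
        # Don't notify yet; they are likely to come back soon.
        self._users_going_offline[user_id] = self._clock_ms()

    def flush(self) -> None:
        # Called periodically (a looping call in the original code).
        now = self._clock_ms()
        for user_id, last_sync_ms in list(self._users_going_offline.items()):
            if now - last_sync_ms > UPDATE_SYNCING_USERS_MS:
                self._users_going_offline.pop(user_id, None)
                self._send_user_sync(user_id, False, last_sync_ms)


# A reconnect within the grace period produces no extra notifications.
debouncer = SyncDebouncer(lambda user, syncing, ts: print(user, syncing, ts))
debouncer.mark_as_coming_online("@alice:example.com")  # prints a "True" sync
debouncer.mark_as_going_offline("@alice:example.com")  # nothing yet
debouncer.mark_as_coming_online("@alice:example.com")  # still nothing
debouncer.flush()                                      # nothing: user came back
```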
@@ -480,6 +221,7 @@ class GenericWorkerSlavedStore( StatsStore, UIAuthWorkerStore, EndToEndRoomKeyStore, + PresenceStore, SlavedDeviceInboxStore, SlavedDeviceStore, SlavedReceiptsStore, @@ -498,7 +240,6 @@ class GenericWorkerSlavedStore( SlavedTransactionStore, SlavedProfileStore, SlavedClientIpStore, - SlavedPresenceStore, SlavedFilteringStore, MonthlyActiveUsersWorkerStore, MediaRepositoryStore, @@ -566,10 +307,7 @@ class GenericWorkerServer(HomeServer): user_directory.register_servlets(self, resource) - # If presence is disabled, use the stub servlet that does - # not allow sending presence - if not self.config.use_presence: - PresenceStatusStubServlet(self).register(resource) + presence.register_servlets(self, resource) groups.register_servlets(self, resource) @@ -629,14 +367,16 @@ class GenericWorkerServer(HomeServer): listener_config, root_resource, self.version_string, + max_request_body_size=max_request_body_size(self.config), + reactor=self.get_reactor(), ), reactor=self.get_reactor(), ) logger.info("Synapse worker now listening on port %d", port) - def start_listening(self, listeners: Iterable[ListenerConfig]): - for listener in listeners: + def start_listening(self): + for listener in self.config.worker_listeners: if listener.type == "http": self._listen_http(listener) elif listener.type == "manhole": @@ -644,7 +384,7 @@ class GenericWorkerServer(HomeServer): listener.bind_addresses, listener.port, manhole_globals={"hs": self} ) elif listener.type == "metrics": - if not self.get_config().enable_metrics: + if not self.config.enable_metrics: logger.warning( ( "Metrics listener configured, but " @@ -658,234 +398,6 @@ class GenericWorkerServer(HomeServer): self.get_tcp_replication().start_replication(self) - @cache_in_self - def get_replication_data_handler(self): - return GenericWorkerReplicationHandler(self) - - @cache_in_self - def get_presence_handler(self): - return GenericWorkerPresence(self) - - -class GenericWorkerReplicationHandler(ReplicationDataHandler): - def __init__(self, hs): - super().__init__(hs) - - self.store = hs.get_datastore() - self.presence_handler = hs.get_presence_handler() # type: GenericWorkerPresence - self.notifier = hs.get_notifier() - - self.notify_pushers = hs.config.start_pushers - self.pusher_pool = hs.get_pusherpool() - - self.send_handler = None # type: Optional[FederationSenderHandler] - if hs.config.send_federation: - self.send_handler = FederationSenderHandler(hs) - - async def on_rdata(self, stream_name, instance_name, token, rows): - await super().on_rdata(stream_name, instance_name, token, rows) - await self._process_and_notify(stream_name, instance_name, token, rows) - - async def _process_and_notify(self, stream_name, instance_name, token, rows): - try: - if self.send_handler: - await self.send_handler.process_replication_rows( - stream_name, token, rows - ) - - if stream_name == PushRulesStream.NAME: - self.notifier.on_new_event( - "push_rules_key", token, users=[row.user_id for row in rows] - ) - elif stream_name in (AccountDataStream.NAME, TagAccountDataStream.NAME): - self.notifier.on_new_event( - "account_data_key", token, users=[row.user_id for row in rows] - ) - elif stream_name == ReceiptsStream.NAME: - self.notifier.on_new_event( - "receipt_key", token, rooms=[row.room_id for row in rows] - ) - await self.pusher_pool.on_new_receipts( - token, token, {row.room_id for row in rows} - ) - elif stream_name == ToDeviceStream.NAME: - entities = [row.entity for row in rows if row.entity.startswith("@")] - if entities: - 
self.notifier.on_new_event("to_device_key", token, users=entities) - elif stream_name == DeviceListsStream.NAME: - all_room_ids = set() # type: Set[str] - for row in rows: - if row.entity.startswith("@"): - room_ids = await self.store.get_rooms_for_user(row.entity) - all_room_ids.update(room_ids) - self.notifier.on_new_event("device_list_key", token, rooms=all_room_ids) - elif stream_name == PresenceStream.NAME: - await self.presence_handler.process_replication_rows(token, rows) - elif stream_name == GroupServerStream.NAME: - self.notifier.on_new_event( - "groups_key", token, users=[row.user_id for row in rows] - ) - elif stream_name == PushersStream.NAME: - for row in rows: - if row.deleted: - self.stop_pusher(row.user_id, row.app_id, row.pushkey) - else: - await self.start_pusher(row.user_id, row.app_id, row.pushkey) - except Exception: - logger.exception("Error processing replication") - - async def on_position(self, stream_name: str, instance_name: str, token: int): - await super().on_position(stream_name, instance_name, token) - # Also call on_rdata to ensure that stream positions are properly reset. - await self.on_rdata(stream_name, instance_name, token, []) - - def stop_pusher(self, user_id, app_id, pushkey): - if not self.notify_pushers: - return - - key = "%s:%s" % (app_id, pushkey) - pushers_for_user = self.pusher_pool.pushers.get(user_id, {}) - pusher = pushers_for_user.pop(key, None) - if pusher is None: - return - logger.info("Stopping pusher %r / %r", user_id, key) - pusher.on_stop() - - async def start_pusher(self, user_id, app_id, pushkey): - if not self.notify_pushers: - return - - key = "%s:%s" % (app_id, pushkey) - logger.info("Starting pusher %r / %r", user_id, key) - return await self.pusher_pool.start_pusher_by_id(app_id, pushkey, user_id) - - def on_remote_server_up(self, server: str): - """Called when get a new REMOTE_SERVER_UP command.""" - - # Let's wake up the transaction queue for the server in case we have - # pending stuff to send to it. - if self.send_handler: - self.send_handler.wake_destination(server) - - -class FederationSenderHandler: - """Processes the fedration replication stream - - This class is only instantiate on the worker responsible for sending outbound - federation transactions. It receives rows from the replication stream and forwards - the appropriate entries to the FederationSender class. - """ - - def __init__(self, hs: GenericWorkerServer): - self.store = hs.get_datastore() - self._is_mine_id = hs.is_mine_id - self.federation_sender = hs.get_federation_sender() - self._hs = hs - - # Stores the latest position in the federation stream we've gotten up - # to. This is always set before we use it. - self.federation_position = None - - self._fed_position_linearizer = Linearizer(name="_fed_position_linearizer") - - def wake_destination(self, server: str): - self.federation_sender.wake_destination(server) - - async def process_replication_rows(self, stream_name, token, rows): - # The federation stream contains things that we want to send out, e.g. - # presence, typing, etc. - if stream_name == "federation": - send_queue.process_rows_for_federation(self.federation_sender, rows) - await self.update_token(token) - - # ... and when new receipts happen - elif stream_name == ReceiptsStream.NAME: - await self._on_new_receipts(rows) - - # ... 
as well as device updates and messages - elif stream_name == DeviceListsStream.NAME: - # The entities are either user IDs (starting with '@') whose devices - # have changed, or remote servers that we need to tell about - # changes. - hosts = {row.entity for row in rows if not row.entity.startswith("@")} - for host in hosts: - self.federation_sender.send_device_messages(host) - - elif stream_name == ToDeviceStream.NAME: - # The to_device stream includes stuff to be pushed to both local - # clients and remote servers, so we ignore entities that start with - # '@' (since they'll be local users rather than destinations). - hosts = {row.entity for row in rows if not row.entity.startswith("@")} - for host in hosts: - self.federation_sender.send_device_messages(host) - - async def _on_new_receipts(self, rows): - """ - Args: - rows (Iterable[synapse.replication.tcp.streams.ReceiptsStream.ReceiptsStreamRow]): - new receipts to be processed - """ - for receipt in rows: - # we only want to send on receipts for our own users - if not self._is_mine_id(receipt.user_id): - continue - receipt_info = ReadReceipt( - receipt.room_id, - receipt.receipt_type, - receipt.user_id, - [receipt.event_id], - receipt.data, - ) - await self.federation_sender.send_read_receipt(receipt_info) - - async def update_token(self, token): - """Update the record of where we have processed to in the federation stream. - - Called after we have processed a an update received over replication. Sends - a FEDERATION_ACK back to the master, and stores the token that we have processed - in `federation_stream_position` so that we can restart where we left off. - """ - self.federation_position = token - - # We save and send the ACK to master asynchronously, so we don't block - # processing on persistence. We don't need to do this operation for - # every single RDATA we receive, we just need to do it periodically. - - if self._fed_position_linearizer.is_queued(None): - # There is already a task queued up to save and send the token, so - # no need to queue up another task. - return - - run_as_background_process("_save_and_send_ack", self._save_and_send_ack) - - async def _save_and_send_ack(self): - """Save the current federation position in the database and send an ACK - to master with where we're up to. - """ - try: - # We linearize here to ensure we don't have races updating the token - # - # XXX this appears to be redundant, since the ReplicationCommandHandler - # has a linearizer which ensures that we only process one line of - # replication data at a time. Should we remove it, or is it doing useful - # service for robustness? Or could we replace it with an assertion that - # we're not being re-entered? - - with (await self._fed_position_linearizer.queue(None)): - # We persist and ack the same position, so we take a copy of it - # here as otherwise it can get modified from underneath us. - current_position = self.federation_position - - await self.store.update_federation_out_pos( - "federation", current_position - ) - - # We ACK this token over replication so that the master can drop - # its in memory queues - self._hs.get_tcp_replication().send_federation_ack(current_position) - except Exception: - logger.exception("Error updating federation stream position") - def start(config_options): try: @@ -957,7 +469,7 @@ def start(config_options): # streams. Will no-op if no streams can be written to by this worker. 
hs.get_replication_streamer() - register_start(_base.start, hs, config.worker_listeners) + register_start(_base.start, hs) _base.start_worker_reactor("synapse-generic-worker", config) diff --git a/synapse/app/homeserver.py b/synapse/app/homeserver.py index 3bfe9d507f..8e78134bbe 100644 --- a/synapse/app/homeserver.py +++ b/synapse/app/homeserver.py @@ -1,5 +1,4 @@ #!/usr/bin/env python -# -*- coding: utf-8 -*- # Copyright 2014-2016 OpenMarket Ltd # Copyright 2019 New Vector Ltd # @@ -18,7 +17,7 @@ import logging import os import sys -from typing import Iterable, Iterator +from typing import Iterator from twisted.internet import reactor from twisted.web.resource import EncodingResourceWrapper, IResource @@ -37,7 +36,13 @@ from synapse.api.urls import ( WEB_CLIENT_PREFIX, ) from synapse.app import _base -from synapse.app._base import listen_ssl, listen_tcp, quit_with_error, register_start +from synapse.app._base import ( + listen_ssl, + listen_tcp, + max_request_body_size, + quit_with_error, + register_start, +) from synapse.config._base import ConfigError from synapse.config.emailconfig import ThreepidBehaviour from synapse.config.homeserver import HomeServerConfig @@ -127,19 +132,21 @@ class SynapseHomeServer(HomeServer): else: root_resource = OptionsResource() - root_resource = create_resource_tree(resources, root_resource) + site = SynapseSite( + "synapse.access.%s.%s" % ("https" if tls else "http", site_tag), + site_tag, + listener_config, + create_resource_tree(resources, root_resource), + self.version_string, + max_request_body_size=max_request_body_size(self.config), + reactor=self.get_reactor(), + ) if tls: ports = listen_ssl( bind_addresses, port, - SynapseSite( - "synapse.access.https.%s" % (site_tag,), - site_tag, - listener_config, - root_resource, - self.version_string, - ), + site, self.tls_server_context_factory, reactor=self.get_reactor(), ) @@ -149,13 +156,7 @@ class SynapseHomeServer(HomeServer): ports = listen_tcp( bind_addresses, port, - SynapseSite( - "synapse.access.http.%s" % (site_tag,), - site_tag, - listener_config, - root_resource, - self.version_string, - ), + site, reactor=self.get_reactor(), ) logger.info("Synapse now listening on TCP port %d", port) @@ -192,7 +193,7 @@ class SynapseHomeServer(HomeServer): } ) - if self.get_config().threepid_behaviour_email == ThreepidBehaviour.LOCAL: + if self.config.threepid_behaviour_email == ThreepidBehaviour.LOCAL: from synapse.rest.synapse.client.password_reset import ( PasswordResetSubmitTokenResource, ) @@ -231,7 +232,7 @@ class SynapseHomeServer(HomeServer): ) if name in ["media", "federation", "client"]: - if self.get_config().enable_media_repo: + if self.config.enable_media_repo: media_repo = self.get_media_repository_resource() resources.update( {MEDIA_PREFIX: media_repo, LEGACY_MEDIA_PREFIX: media_repo} @@ -245,7 +246,7 @@ class SynapseHomeServer(HomeServer): resources[SERVER_KEY_V2_PREFIX] = KeyApiV2Resource(self) if name == "webclient": - webclient_loc = self.get_config().web_client_location + webclient_loc = self.config.web_client_location if webclient_loc is None: logger.warning( @@ -266,7 +267,7 @@ class SynapseHomeServer(HomeServer): # https://twistedmatrix.com/trac/ticket/7678 resources[WEB_CLIENT_PREFIX] = File(webclient_loc) - if name == "metrics" and self.get_config().enable_metrics: + if name == "metrics" and self.config.enable_metrics: resources[METRICS_PREFIX] = MetricsResource(RegistryProxy) if name == "replication": @@ -274,18 +275,18 @@ class SynapseHomeServer(HomeServer): return resources - def 
start_listening(self, listeners: Iterable[ListenerConfig]): - config = self.get_config() - - if config.redis_enabled: + def start_listening(self): + if self.config.redis_enabled: # If redis is enabled we connect via the replication command handler # in the same way as the workers (since we're effectively a client # rather than a server). self.get_tcp_replication().start_replication(self) - for listener in listeners: + for listener in self.config.server.listeners: if listener.type == "http": - self._listening_services.extend(self._listener_http(config, listener)) + self._listening_services.extend( + self._listener_http(self.config, listener) + ) elif listener.type == "manhole": _base.listen_manhole( listener.bind_addresses, listener.port, manhole_globals={"hs": self} @@ -299,7 +300,7 @@ class SynapseHomeServer(HomeServer): for s in services: reactor.addSystemEventTrigger("before", "shutdown", s.stopListening) elif listener.type == "metrics": - if not self.get_config().enable_metrics: + if not self.config.enable_metrics: logger.warning( ( "Metrics listener configured, but " @@ -413,7 +414,7 @@ def setup(config_options): # Loading the provider metadata also ensures the provider config is valid. await oidc.load_metadata() - await _base.start(hs, config.listeners) + await _base.start(hs) hs.get_datastore().db_pool.updates.start_doing_background_updates() diff --git a/synapse/app/media_repository.py b/synapse/app/media_repository.py index add43147b3..2d50060ffb 100644 --- a/synapse/app/media_repository.py +++ b/synapse/app/media_repository.py @@ -1,5 +1,4 @@ #!/usr/bin/env python -# -*- coding: utf-8 -*- # Copyright 2016 OpenMarket Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/app/pusher.py b/synapse/app/pusher.py index add43147b3..2d50060ffb 100644 --- a/synapse/app/pusher.py +++ b/synapse/app/pusher.py @@ -1,5 +1,4 @@ #!/usr/bin/env python -# -*- coding: utf-8 -*- # Copyright 2016 OpenMarket Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/app/synchrotron.py b/synapse/app/synchrotron.py index add43147b3..2d50060ffb 100644 --- a/synapse/app/synchrotron.py +++ b/synapse/app/synchrotron.py @@ -1,5 +1,4 @@ #!/usr/bin/env python -# -*- coding: utf-8 -*- # Copyright 2016 OpenMarket Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/app/user_dir.py b/synapse/app/user_dir.py index 503d44f687..a368efb354 100644 --- a/synapse/app/user_dir.py +++ b/synapse/app/user_dir.py @@ -1,5 +1,4 @@ #!/usr/bin/env python -# -*- coding: utf-8 -*- # Copyright 2017 Vector Creations Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/appservice/__init__.py b/synapse/appservice/__init__.py index 0bfc5e445f..6504c6bd3f 100644 --- a/synapse/appservice/__init__.py +++ b/synapse/appservice/__init__.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2015, 2016 OpenMarket Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/appservice/api.py b/synapse/appservice/api.py index 8c5d178e05..61152b2c46 100644 --- a/synapse/appservice/api.py +++ b/synapse/appservice/api.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2015, 2016 OpenMarket Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/appservice/scheduler.py b/synapse/appservice/scheduler.py index 5203ffe90f..6a2ce99b55 100644 --- a/synapse/appservice/scheduler.py +++ b/synapse/appservice/scheduler.py @@ -1,4 +1,3 
@@ -# -*- coding: utf-8 -*- # Copyright 2015, 2016 OpenMarket Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/config/__init__.py b/synapse/config/__init__.py index 1e76e9559d..d2f889159e 100644 --- a/synapse/config/__init__.py +++ b/synapse/config/__init__.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2014-2016 OpenMarket Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/config/__main__.py b/synapse/config/__main__.py index 65043d5b5b..b5b6735a8f 100644 --- a/synapse/config/__main__.py +++ b/synapse/config/__main__.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2015, 2016 OpenMarket Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/config/_base.py b/synapse/config/_base.py index ba9cd63cf2..08e2c2c543 100644 --- a/synapse/config/_base.py +++ b/synapse/config/_base.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2014-2016 OpenMarket Ltd # Copyright 2017-2018 New Vector Ltd # Copyright 2019 The Matrix.org Foundation C.I.C. diff --git a/synapse/config/_base.pyi b/synapse/config/_base.pyi index ddec356a07..ff9abbc232 100644 --- a/synapse/config/_base.pyi +++ b/synapse/config/_base.pyi @@ -7,16 +7,16 @@ from synapse.config import ( auth, captcha, cas, - consent_config, + consent, database, emailconfig, experimental, groups, - jwt_config, + jwt, key, logger, metrics, - oidc_config, + oidc, password_auth_providers, push, ratelimiting, @@ -24,9 +24,9 @@ from synapse.config import ( registration, repository, room_directory, - saml2_config, + saml2, server, - server_notices_config, + server_notices, spam_checker, sso, stats, @@ -65,11 +65,11 @@ class RootConfig: api: api.ApiConfig appservice: appservice.AppServiceConfig key: key.KeyConfig - saml2: saml2_config.SAML2Config + saml2: saml2.SAML2Config cas: cas.CasConfig sso: sso.SSOConfig - oidc: oidc_config.OIDCConfig - jwt: jwt_config.JWTConfig + oidc: oidc.OIDCConfig + jwt: jwt.JWTConfig auth: auth.AuthConfig email: emailconfig.EmailConfig worker: workers.WorkerConfig @@ -78,9 +78,9 @@ class RootConfig: spamchecker: spam_checker.SpamCheckerConfig groups: groups.GroupsConfig userdirectory: user_directory.UserDirectoryConfig - consent: consent_config.ConsentConfig + consent: consent.ConsentConfig stats: stats.StatsConfig - servernotices: server_notices_config.ServerNoticesConfig + servernotices: server_notices.ServerNoticesConfig roomdirectory: room_directory.RoomDirectoryConfig thirdpartyrules: third_party_event_rules.ThirdPartyRulesConfig tracer: tracer.TracerConfig diff --git a/synapse/config/_util.py b/synapse/config/_util.py index 8fce7f6bb1..3edb4b7106 100644 --- a/synapse/config/_util.py +++ b/synapse/config/_util.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2020 The Matrix.org Foundation C.I.C. # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/config/account_validity.py b/synapse/config/account_validity.py index 6d107944a3..842fe2ebf5 100644 --- a/synapse/config/account_validity.py +++ b/synapse/config/account_validity.py @@ -83,7 +83,7 @@ class AccountValidityConfig(Config): def generate_config_section(self, **kwargs): return """\ ## Account Validity ## - # + # Optional account validity configuration. This allows for accounts to be denied # any request after a given period. # @@ -134,16 +134,35 @@ class AccountValidityConfig(Config): # serve to the user when trying to renew an account. 
If not set, default # templates from within the Synapse package will be used. # + # The currently available templates are: + # + # * account_renewed.html: Displayed to the user after they have successfully + # renewed their account. + # + # * account_previously_renewed.html: Displayed to the user if they attempt to + # renew their account with a token that is valid, but that has already + # been used. In this case the account is not renewed again. + # + # * invalid_token.html: Displayed to the user when they try to renew an account + # with an unknown or invalid renewal token. + # + # See https://github.com/matrix-org/synapse/tree/master/synapse/res/templates for + # default template contents. + # + # The file name of some of these templates can be configured below for legacy + # reasons. + # #template_dir: "res/templates" - # File within 'template_dir' giving the HTML to be displayed to the user after - # they successfully renewed their account. If not set, default text is used. + # A custom file name for the 'account_renewed.html' template. + # + # If not set, the file is assumed to be named "account_renewed.html". # #account_renewed_html_path: "account_renewed.html" - # File within 'template_dir' giving the HTML to be displayed when the user - # tries to renew an account with an invalid renewal token. If not set, - # default text is used. + # A custom file name for the 'invalid_token.html' template. + # + # If not set, the file is assumed to be named "invalid_token.html". # #invalid_token_html_path: "invalid_token.html" """ diff --git a/synapse/config/auth.py b/synapse/config/auth.py index 9aabaadf9e..e10d641a96 100644 --- a/synapse/config/auth.py +++ b/synapse/config/auth.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2015, 2016 OpenMarket Ltd # Copyright 2020 The Matrix.org Foundation C.I.C. # diff --git a/synapse/config/cache.py b/synapse/config/cache.py index 4e8abbf88a..41b9b3f51f 100644 --- a/synapse/config/cache.py +++ b/synapse/config/cache.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2019 Matrix.org Foundation C.I.C. # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/config/cas.py b/synapse/config/cas.py index dbf5085965..901f4123e1 100644 --- a/synapse/config/cas.py +++ b/synapse/config/cas.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2015, 2016 OpenMarket Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/config/consent_config.py b/synapse/config/consent.py index c47f364b14..30d07cc219 100644 --- a/synapse/config/consent_config.py +++ b/synapse/config/consent.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2018 New Vector Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/config/database.py b/synapse/config/database.py index e7889b9c20..79a02706b4 100644 --- a/synapse/config/database.py +++ b/synapse/config/database.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2014-2016 OpenMarket Ltd # Copyright 2020 The Matrix.org Foundation C.I.C. # diff --git a/synapse/config/emailconfig.py b/synapse/config/emailconfig.py index 5431691831..5564d7d097 100644 --- a/synapse/config/emailconfig.py +++ b/synapse/config/emailconfig.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2015-2016 OpenMarket Ltd # Copyright 2017-2018 New Vector Ltd # Copyright 2019 The Matrix.org Foundation C.I.C. 
diff --git a/synapse/config/experimental.py b/synapse/config/experimental.py index a207de63b8..7137e3d323 100644 --- a/synapse/config/experimental.py +++ b/synapse/config/experimental.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2021 The Matrix.org Foundation C.I.C. # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/config/federation.py b/synapse/config/federation.py index 55e4db5442..090ba047fa 100644 --- a/synapse/config/federation.py +++ b/synapse/config/federation.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2020 The Matrix.org Foundation C.I.C. # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/config/groups.py b/synapse/config/groups.py index 7b7860ea71..15c2e64bda 100644 --- a/synapse/config/groups.py +++ b/synapse/config/groups.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2017 New Vector Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/config/homeserver.py b/synapse/config/homeserver.py index 58961679ff..c23b66c88c 100644 --- a/synapse/config/homeserver.py +++ b/synapse/config/homeserver.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2014-2016 OpenMarket Ltd # Copyright 2018 New Vector Ltd # @@ -21,17 +20,17 @@ from .auth import AuthConfig from .cache import CacheConfig from .captcha import CaptchaConfig from .cas import CasConfig -from .consent_config import ConsentConfig +from .consent import ConsentConfig from .database import DatabaseConfig from .emailconfig import EmailConfig from .experimental import ExperimentalConfig from .federation import FederationConfig from .groups import GroupsConfig -from .jwt_config import JWTConfig +from .jwt import JWTConfig from .key import KeyConfig from .logger import LoggingConfig from .metrics import MetricsConfig -from .oidc_config import OIDCConfig +from .oidc import OIDCConfig from .password_auth_providers import PasswordAuthProviderConfig from .push import PushConfig from .ratelimiting import RatelimitConfig @@ -40,9 +39,9 @@ from .registration import RegistrationConfig from .repository import ContentRepositoryConfig from .room import RoomConfig from .room_directory import RoomDirectoryConfig -from .saml2_config import SAML2Config +from .saml2 import SAML2Config from .server import ServerConfig -from .server_notices_config import ServerNoticesConfig +from .server_notices import ServerNoticesConfig from .spam_checker import SpamCheckerConfig from .sso import SSOConfig from .stats import StatsConfig diff --git a/synapse/config/jwt_config.py b/synapse/config/jwt.py index f30330abb6..9e07e73008 100644 --- a/synapse/config/jwt_config.py +++ b/synapse/config/jwt.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2015 Niklas Riekenbrauck # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/config/key.py b/synapse/config/key.py index 350ff1d665..94a9063043 100644 --- a/synapse/config/key.py +++ b/synapse/config/key.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2015, 2016 OpenMarket Ltd # Copyright 2019 The Matrix.org Foundation C.I.C. 
# diff --git a/synapse/config/logger.py b/synapse/config/logger.py index 999aecce5c..813076dfe2 100644 --- a/synapse/config/logger.py +++ b/synapse/config/logger.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2014-2016 OpenMarket Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -32,7 +31,6 @@ from twisted.logger import ( ) import synapse -from synapse.app import _base as appbase from synapse.logging._structured import setup_structured_logging from synapse.logging.context import LoggingContextFilter from synapse.logging.filter import MetadataFilter @@ -319,6 +317,8 @@ def setup_logging( # Perform one-time logging configuration. _setup_stdlib_logging(config, log_config_path, logBeginner=logBeginner) # Add a SIGHUP handler to reload the logging configuration, if one is available. + from synapse.app import _base as appbase + appbase.register_sighup(_reload_logging_config, log_config_path) # Log immediately so we can grep backwards. diff --git a/synapse/config/metrics.py b/synapse/config/metrics.py index 2b289f4208..7ac82edb0e 100644 --- a/synapse/config/metrics.py +++ b/synapse/config/metrics.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2015, 2016 OpenMarket Ltd # Copyright 2019 The Matrix.org Foundation C.I.C. # diff --git a/synapse/config/oidc_config.py b/synapse/config/oidc.py index 05733ec41d..ea0abf5aa2 100644 --- a/synapse/config/oidc_config.py +++ b/synapse/config/oidc.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2020 Quentin Gliech # Copyright 2020-2021 The Matrix.org Foundation C.I.C. # @@ -15,20 +14,23 @@ # limitations under the License. from collections import Counter -from typing import Iterable, List, Mapping, Optional, Tuple, Type +from typing import Collection, Iterable, List, Mapping, Optional, Tuple, Type import attr from synapse.config._util import validate_config from synapse.config.sso import SsoAttributeRequirement from synapse.python_dependencies import DependencyException, check_requirements -from synapse.types import Collection, JsonDict +from synapse.types import JsonDict from synapse.util.module_loader import load_module from synapse.util.stringutils import parse_and_validate_mxc_uri from ._base import Config, ConfigError, read_file -DEFAULT_USER_MAPPING_PROVIDER = "synapse.handlers.oidc_handler.JinjaOidcMappingProvider" +DEFAULT_USER_MAPPING_PROVIDER = "synapse.handlers.oidc.JinjaOidcMappingProvider" +# The module that JinjaOidcMappingProvider is in was renamed, we want to +# transparently handle both the same. 
+LEGACY_USER_MAPPING_PROVIDER = "synapse.handlers.oidc_handler.JinjaOidcMappingProvider" class OIDCConfig(Config): @@ -404,6 +406,8 @@ def _parse_oidc_config_dict( """ ump_config = oidc_config.get("user_mapping_provider", {}) ump_config.setdefault("module", DEFAULT_USER_MAPPING_PROVIDER) + if ump_config.get("module") == LEGACY_USER_MAPPING_PROVIDER: + ump_config["module"] = DEFAULT_USER_MAPPING_PROVIDER ump_config.setdefault("config", {}) ( diff --git a/synapse/config/password_auth_providers.py b/synapse/config/password_auth_providers.py index 85d07c4f8f..1cf69734bb 100644 --- a/synapse/config/password_auth_providers.py +++ b/synapse/config/password_auth_providers.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2016 Openmarket # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/config/push.py b/synapse/config/push.py index 7831a2ef79..6ef8491caf 100644 --- a/synapse/config/push.py +++ b/synapse/config/push.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2015, 2016 OpenMarket Ltd # Copyright 2017 New Vector Ltd # diff --git a/synapse/config/redis.py b/synapse/config/redis.py index 1373302335..33104af734 100644 --- a/synapse/config/redis.py +++ b/synapse/config/redis.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2020 The Matrix.org Foundation C.I.C. # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/config/registration.py b/synapse/config/registration.py index 1f441107b3..632ef0d796 100644 --- a/synapse/config/registration.py +++ b/synapse/config/registration.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2015, 2016 OpenMarket Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/config/repository.py b/synapse/config/repository.py index 6cade50acc..003b1a957d 100644 --- a/synapse/config/repository.py +++ b/synapse/config/repository.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2014, 2015 OpenMarket Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -71,6 +70,7 @@ def parse_thumbnail_requirements(thumbnail_sizes): jpeg_thumbnail = ThumbnailRequirement(width, height, method, "image/jpeg") png_thumbnail = ThumbnailRequirement(width, height, method, "image/png") requirements.setdefault("image/jpeg", []).append(jpeg_thumbnail) + requirements.setdefault("image/jpg", []).append(jpeg_thumbnail) requirements.setdefault("image/webp", []).append(jpeg_thumbnail) requirements.setdefault("image/gif", []).append(png_thumbnail) requirements.setdefault("image/png", []).append(png_thumbnail) diff --git a/synapse/config/room.py b/synapse/config/room.py index 692d7a1936..d889d90dbc 100644 --- a/synapse/config/room.py +++ b/synapse/config/room.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2020 The Matrix.org Foundation C.I.C. # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/config/room_directory.py b/synapse/config/room_directory.py index 2dd719c388..56981cac79 100644 --- a/synapse/config/room_directory.py +++ b/synapse/config/room_directory.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2018 New Vector Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/config/saml2_config.py b/synapse/config/saml2.py index 6db9cb5ced..3d1218c8d1 100644 --- a/synapse/config/saml2_config.py +++ b/synapse/config/saml2.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2018 New Vector Ltd # Copyright 2019 The Matrix.org Foundation C.I.C. 
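The `oidc_config.py` → `oidc.py` hunk above keeps configs written against the old module path working by rewriting it to the new one at parse time (the SAML hunk that follows applies the same pattern). A minimal self-contained sketch of that normalisation, using the constants from the diff and a made-up config dict:

```python
DEFAULT_USER_MAPPING_PROVIDER = "synapse.handlers.oidc.JinjaOidcMappingProvider"
LEGACY_USER_MAPPING_PROVIDER = (
    "synapse.handlers.oidc_handler.JinjaOidcMappingProvider"
)

def normalise_user_mapping_provider(oidc_config: dict) -> dict:
    """Fill in the default provider and rewrite the legacy module path."""
    ump_config = oidc_config.get("user_mapping_provider", {})
    ump_config.setdefault("module", DEFAULT_USER_MAPPING_PROVIDER)
    # Configs written before the rename still point at the old module path;
    # map it onto the new location so they keep working unchanged.
    if ump_config.get("module") == LEGACY_USER_MAPPING_PROVIDER:
        ump_config["module"] = DEFAULT_USER_MAPPING_PROVIDER
    ump_config.setdefault("config", {})
    return ump_config

# A config that still references the pre-rename module path:
print(normalise_user_mapping_provider(
    {"user_mapping_provider": {"module": LEGACY_USER_MAPPING_PROVIDER}}
))
```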
# @@ -26,7 +25,10 @@ from ._util import validate_config logger = logging.getLogger(__name__) -DEFAULT_USER_MAPPING_PROVIDER = ( +DEFAULT_USER_MAPPING_PROVIDER = "synapse.handlers.saml.DefaultSamlMappingProvider" +# The module that DefaultSamlMappingProvider is in was renamed, we want to +# transparently handle both the same. +LEGACY_USER_MAPPING_PROVIDER = ( "synapse.handlers.saml_handler.DefaultSamlMappingProvider" ) @@ -98,6 +100,8 @@ class SAML2Config(Config): # Use the default user mapping provider if not set ump_dict.setdefault("module", DEFAULT_USER_MAPPING_PROVIDER) + if ump_dict.get("module") == LEGACY_USER_MAPPING_PROVIDER: + ump_dict["module"] = DEFAULT_USER_MAPPING_PROVIDER # Ensure a config is present ump_dict["config"] = ump_dict.get("config") or {} diff --git a/synapse/config/server.py b/synapse/config/server.py index 6de884eaf7..9b7cc488cf 100644 --- a/synapse/config/server.py +++ b/synapse/config/server.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2014-2016 OpenMarket Ltd # Copyright 2017-2018 New Vector Ltd # Copyright 2019 The Matrix.org Foundation C.I.C. @@ -236,7 +235,11 @@ class ServerConfig(Config): self.print_pidfile = config.get("print_pidfile") self.user_agent_suffix = config.get("user_agent_suffix") self.use_frozen_dicts = config.get("use_frozen_dicts", False) + self.public_baseurl = config.get("public_baseurl") + if self.public_baseurl is not None: + if self.public_baseurl[-1] != "/": + self.public_baseurl += "/" # Whether to enable user presence. presence_config = config.get("presence") or {} @@ -408,10 +411,6 @@ class ServerConfig(Config): config_path=("federation_ip_range_blacklist",), ) - if self.public_baseurl is not None: - if self.public_baseurl[-1] != "/": - self.public_baseurl += "/" - # (undocumented) option for torturing the worker-mode replication a bit, # for testing. The value defines the number of milliseconds to pause before # sending out any replication updates. diff --git a/synapse/config/server_notices_config.py b/synapse/config/server_notices.py index 57f69dc8e2..48bf3241b6 100644 --- a/synapse/config/server_notices_config.py +++ b/synapse/config/server_notices.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2018 New Vector Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/config/spam_checker.py b/synapse/config/spam_checker.py index 3d05abc158..447ba3303b 100644 --- a/synapse/config/spam_checker.py +++ b/synapse/config/spam_checker.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2017 New Vector Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/config/sso.py b/synapse/config/sso.py index 243cc681e8..af645c930d 100644 --- a/synapse/config/sso.py +++ b/synapse/config/sso.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2020 The Matrix.org Foundation C.I.C. 
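The `server.py` hunk above moves the `public_baseurl` trailing-slash normalisation to the point where the option is first read, so later config code always sees a canonical value. A tiny standalone sketch of that normalisation, with example URLs:

```python
from typing import Optional

def normalise_public_baseurl(public_baseurl: Optional[str]) -> Optional[str]:
    # Ensure a configured base URL always ends with a trailing slash.
    if public_baseurl is not None and public_baseurl[-1] != "/":
        public_baseurl += "/"
    return public_baseurl

assert normalise_public_baseurl("https://matrix.example.org") == "https://matrix.example.org/"
assert normalise_public_baseurl("https://matrix.example.org/") == "https://matrix.example.org/"
assert normalise_public_baseurl(None) is None
```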
# # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/config/stats.py b/synapse/config/stats.py index 2258329a52..3d44b51201 100644 --- a/synapse/config/stats.py +++ b/synapse/config/stats.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2018 New Vector Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/config/third_party_event_rules.py b/synapse/config/third_party_event_rules.py index c04e1c4e07..f502ff539e 100644 --- a/synapse/config/third_party_event_rules.py +++ b/synapse/config/third_party_event_rules.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2019 The Matrix.org Foundation C.I.C. # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/config/tls.py b/synapse/config/tls.py index 85b5db4c40..b041869758 100644 --- a/synapse/config/tls.py +++ b/synapse/config/tls.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2014-2016 OpenMarket Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/config/tracer.py b/synapse/config/tracer.py index 727a1e7008..db22b5b19f 100644 --- a/synapse/config/tracer.py +++ b/synapse/config/tracer.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2019 The Matrix.org Foundation C.I.C.d # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/config/user_directory.py b/synapse/config/user_directory.py index ab7770e472..3ac1f2b5b1 100644 --- a/synapse/config/user_directory.py +++ b/synapse/config/user_directory.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2017 New Vector Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/config/workers.py b/synapse/config/workers.py index ac92375a85..462630201d 100644 --- a/synapse/config/workers.py +++ b/synapse/config/workers.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2016 OpenMarket Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -65,6 +64,14 @@ class WriterLocations: Attributes: events: The instances that write to the event and backfill streams. typing: The instance that writes to the typing stream. + to_device: The instances that write to the to_device stream. Currently + can only be a single instance. + account_data: The instances that write to the account data streams. Currently + can only be a single instance. + receipts: The instances that write to the receipts stream. Currently + can only be a single instance. + presence: The instances that write to the presence stream. Currently + can only be a single instance. """ events = attr.ib( @@ -86,6 +93,11 @@ class WriterLocations: type=List[str], converter=_instance_to_list_converter, ) + presence = attr.ib( + default=["master"], + type=List[str], + converter=_instance_to_list_converter, + ) class WorkerConfig(Config): @@ -189,7 +201,14 @@ class WorkerConfig(Config): # Check that the configured writers for events and typing also appears in # `instance_map`. 
- for stream in ("events", "typing", "to_device", "account_data", "receipts"): + for stream in ( + "events", + "typing", + "to_device", + "account_data", + "receipts", + "presence", + ): instances = _instance_to_list_converter(getattr(self.writers, stream)) for instance in instances: if instance != "master" and instance not in self.instance_map: @@ -216,6 +235,11 @@ class WorkerConfig(Config): if len(self.writers.events) == 0: raise ConfigError("Must specify at least one instance to handle `events`.") + if len(self.writers.presence) != 1: + raise ConfigError( + "Must only specify one instance to handle `presence` messages." + ) + self.events_shard_config = RoutableShardedWorkerHandlingConfig( self.writers.events ) diff --git a/synapse/crypto/__init__.py b/synapse/crypto/__init__.py index bfebb0f644..5e83dba2ed 100644 --- a/synapse/crypto/__init__.py +++ b/synapse/crypto/__init__.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2014-2016 OpenMarket Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/crypto/event_signing.py b/synapse/crypto/event_signing.py index 8fb116ae18..0f2b632e47 100644 --- a/synapse/crypto/event_signing.py +++ b/synapse/crypto/event_signing.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # # Copyright 2014-2016 OpenMarket Ltd # Copyright 2020 The Matrix.org Foundation C.I.C. diff --git a/synapse/crypto/keyring.py b/synapse/crypto/keyring.py index d5fb51513b..5f18ef7748 100644 --- a/synapse/crypto/keyring.py +++ b/synapse/crypto/keyring.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2014-2016 OpenMarket Ltd # Copyright 2017, 2018 New Vector Ltd # @@ -502,7 +501,7 @@ class StoreKeyFetcher(KeyFetcher): class BaseV2KeyFetcher(KeyFetcher): def __init__(self, hs: "HomeServer"): self.store = hs.get_datastore() - self.config = hs.get_config() + self.config = hs.config async def process_v2_response( self, from_server: str, response_json: JsonDict, time_added_ms: int diff --git a/synapse/event_auth.py b/synapse/event_auth.py index b2d8ba7849..77831b62de 100644 --- a/synapse/event_auth.py +++ b/synapse/event_auth.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2014 - 2016 OpenMarket Ltd # Copyright 2020 The Matrix.org Foundation C.I.C. # @@ -15,14 +14,14 @@ # limitations under the License. 
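The `workers.py` hunks above add `presence` to the set of writable streams, require each configured writer to appear in `instance_map`, and insist on exactly one `presence` writer. A minimal sketch of those validation rules, using plain dicts in place of Synapse's config classes; the instance names are invented and the `instance_map` error message is paraphrased.

```python
from typing import Dict, List

def validate_writers(writers: Dict[str, List[str]], instance_map: Dict[str, dict]) -> None:
    # Every configured writer (other than the main "master" process) must be
    # a known worker instance.
    for stream in ("events", "typing", "to_device", "account_data", "receipts", "presence"):
        for instance in writers.get(stream, ["master"]):
            if instance != "master" and instance not in instance_map:
                raise ValueError(
                    f"Instance {instance!r} is configured to write {stream} "
                    "but does not appear in `instance_map`."
                )
    if len(writers.get("events", ["master"])) == 0:
        raise ValueError("Must specify at least one instance to handle `events`.")
    if len(writers.get("presence", ["master"])) != 1:
        raise ValueError("Must only specify one instance to handle `presence` messages.")

validate_writers(
    {"events": ["event_writer"], "presence": ["presence_writer"]},
    {"event_writer": {}, "presence_writer": {}},
)
```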
import logging -from typing import List, Optional, Set, Tuple +from typing import Any, Dict, List, Optional, Set, Tuple from canonicaljson import encode_canonical_json from signedjson.key import decode_verify_key_bytes from signedjson.sign import SignatureVerifyException, verify_signed_json from unpaddedbase64 import decode_base64 -from synapse.api.constants import EventTypes, JoinRules, Membership +from synapse.api.constants import MAX_PDU_SIZE, EventTypes, JoinRules, Membership from synapse.api.errors import AuthError, EventSizeError, SynapseError from synapse.api.room_versions import ( KNOWN_ROOM_VERSIONS, @@ -207,7 +206,7 @@ def _check_size_limits(event: EventBase) -> None: too_big("type") if len(event.event_id) > 255: too_big("event_id") - if len(encode_canonical_json(event.get_pdu_json())) > 65536: + if len(encode_canonical_json(event.get_pdu_json())) > MAX_PDU_SIZE: too_big("event") @@ -687,7 +686,7 @@ def _verify_third_party_invite(event: EventBase, auth_events: StateMap[EventBase public_key = public_key_object["public_key"] try: for server, signature_block in signed["signatures"].items(): - for key_name, encoded_signature in signature_block.items(): + for key_name in signature_block.keys(): if not key_name.startswith("ed25519:"): continue verify_key = decode_verify_key_bytes( @@ -705,7 +704,7 @@ def _verify_third_party_invite(event: EventBase, auth_events: StateMap[EventBase return False -def get_public_keys(invite_event): +def get_public_keys(invite_event: EventBase) -> List[Dict[str, Any]]: public_keys = [] if "public_key" in invite_event.content: o = {"public_key": invite_event.content["public_key"]} diff --git a/synapse/events/__init__.py b/synapse/events/__init__.py index f9032e3697..c8b52cbc7a 100644 --- a/synapse/events/__init__.py +++ b/synapse/events/__init__.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2014-2016 OpenMarket Ltd # Copyright 2019 New Vector Ltd # Copyright 2020 The Matrix.org Foundation C.I.C. diff --git a/synapse/events/builder.py b/synapse/events/builder.py index c1c0426f6e..5793553a88 100644 --- a/synapse/events/builder.py +++ b/synapse/events/builder.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2014-2016 OpenMarket Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/events/presence_router.py b/synapse/events/presence_router.py index 24cd389d80..6c37c8a7a4 100644 --- a/synapse/events/presence_router.py +++ b/synapse/events/presence_router.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2021 The Matrix.org Foundation C.I.C. # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/events/snapshot.py b/synapse/events/snapshot.py index 7295df74fe..f8d898c3b1 100644 --- a/synapse/events/snapshot.py +++ b/synapse/events/snapshot.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2014-2016 OpenMarket Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/events/spamcheck.py b/synapse/events/spamcheck.py index b5a9c71ee6..4e8ea84f92 100644 --- a/synapse/events/spamcheck.py +++ b/synapse/events/spamcheck.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2017 New Vector Ltd # Copyright 2019 The Matrix.org Foundation C.I.C. 
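The `event_auth.py` hunk above replaces the hard-coded 65536-byte event size limit with a named `MAX_PDU_SIZE` constant. A small standalone sketch of the check, assuming the constant keeps the old literal value:

```python
from canonicaljson import encode_canonical_json

# Assumption: MAX_PDU_SIZE keeps the value the literal previously encoded.
MAX_PDU_SIZE = 65536

def pdu_too_big(pdu_json: dict) -> bool:
    """True if the canonical-JSON encoding exceeds the PDU size limit."""
    return len(encode_canonical_json(pdu_json)) > MAX_PDU_SIZE

print(pdu_too_big({"type": "m.room.message", "content": {"body": "hi"}}))
```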
# @@ -16,12 +15,11 @@ import inspect import logging -from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple, Union +from typing import TYPE_CHECKING, Any, Collection, Dict, List, Optional, Tuple, Union from synapse.rest.media.v1._base import FileInfo from synapse.rest.media.v1.media_storage import ReadableFileWrapper from synapse.spam_checker_api import RegistrationBehaviour -from synapse.types import Collection from synapse.util.async_helpers import maybe_awaitable if TYPE_CHECKING: diff --git a/synapse/events/third_party_rules.py b/synapse/events/third_party_rules.py index 9767d23940..f7944fd834 100644 --- a/synapse/events/third_party_rules.py +++ b/synapse/events/third_party_rules.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2019 The Matrix.org Foundation C.I.C. # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/events/utils.py b/synapse/events/utils.py index 7050f0c885..ec96999e4e 100644 --- a/synapse/events/utils.py +++ b/synapse/events/utils.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2014-2016 OpenMarket Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/events/validator.py b/synapse/events/validator.py index f8f3b1a31e..fa6987d7cb 100644 --- a/synapse/events/validator.py +++ b/synapse/events/validator.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2014-2016 OpenMarket Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/federation/__init__.py b/synapse/federation/__init__.py index f5f0bdfca3..46300cba25 100644 --- a/synapse/federation/__init__.py +++ b/synapse/federation/__init__.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2014-2016 OpenMarket Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/federation/federation_base.py b/synapse/federation/federation_base.py index 383737520a..949dcd4614 100644 --- a/synapse/federation/federation_base.py +++ b/synapse/federation/federation_base.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2015, 2016 OpenMarket Ltd # Copyright 2020 The Matrix.org Foundation C.I.C. # diff --git a/synapse/federation/federation_client.py b/synapse/federation/federation_client.py index 10244ee0d2..5125441df5 100644 --- a/synapse/federation/federation_client.py +++ b/synapse/federation/federation_client.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2015, 2016 OpenMarket Ltd # Copyrignt 2020 Sorunome # Copyrignt 2020 The Matrix.org Foundation C.I.C. @@ -454,6 +453,28 @@ class FederationClient(FederationBase): return signed_auth + def _is_unknown_endpoint( + self, e: HttpResponseException, synapse_error: Optional[SynapseError] = None + ) -> bool: + """ + Returns true if the response was due to an endpoint being unimplemented. + + Args: + e: The error response received from the remote server. + synapse_error: The above error converted to a SynapseError. This is + automatically generated if not provided. + + """ + if synapse_error is None: + synapse_error = e.to_synapse_error() + # There is no good way to detect an "unknown" endpoint. + # + # Dendrite returns a 404 (with no body); synapse returns a 400 + # with M_UNRECOGNISED. + return e.code == 404 or ( + e.code == 400 and synapse_error.errcode == Codes.UNRECOGNIZED + ) + async def _try_destination_list( self, description: str, @@ -471,9 +492,9 @@ class FederationClient(FederationBase): callback: Function to run for each server. Passed a single argument: the server_name to try. 
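The new `_is_unknown_endpoint` helper added just above centralises the heuristic for "the remote simply doesn't implement this endpoint". A self-contained sketch of that heuristic, with a simplified stand-in for Synapse's `HttpResponseException` and error codes:

```python
from typing import Optional

M_UNRECOGNIZED = "M_UNRECOGNIZED"  # stands in for Codes.UNRECOGNIZED

class FakeHttpResponseError(Exception):
    """Simplified stand-in for Synapse's HttpResponseException."""

    def __init__(self, code: int, errcode: Optional[str] = None):
        self.code = code
        self.errcode = errcode

def is_unknown_endpoint(e: FakeHttpResponseError) -> bool:
    """True if the remote server does not implement the endpoint at all.

    There is no single signal for this: Dendrite answers 404 with no body,
    while Synapse answers 400 with an M_UNRECOGNIZED error code.
    """
    return e.code == 404 or (e.code == 400 and e.errcode == M_UNRECOGNIZED)

assert is_unknown_endpoint(FakeHttpResponseError(404))
assert is_unknown_endpoint(FakeHttpResponseError(400, M_UNRECOGNIZED))
assert not is_unknown_endpoint(FakeHttpResponseError(403, "M_FORBIDDEN"))
```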
- If the callback raises a CodeMessageException with a 300/400 code, - attempts to perform the operation stop immediately and the exception is - reraised. + If the callback raises a CodeMessageException with a 300/400 code or + an UnsupportedRoomVersionError, attempts to perform the operation + stop immediately and the exception is reraised. Otherwise, if the callback raises an Exception the error is logged and the next server tried. Normally the stacktrace is logged but this is @@ -495,8 +516,7 @@ class FederationClient(FederationBase): continue try: - res = await callback(destination) - return res + return await callback(destination) except InvalidResponseError as e: logger.warning("Failed to %s via %s: %s", description, destination, e) except UnsupportedRoomVersionError: @@ -505,17 +525,15 @@ class FederationClient(FederationBase): synapse_error = e.to_synapse_error() failover = False + # Failover on an internal server error, or if the destination + # doesn't implemented the endpoint for some reason. if 500 <= e.code < 600: failover = True - elif failover_on_unknown_endpoint: - # there is no good way to detect an "unknown" endpoint. Dendrite - # returns a 404 (with no body); synapse returns a 400 - # with M_UNRECOGNISED. - if e.code == 404 or ( - e.code == 400 and synapse_error.errcode == Codes.UNRECOGNIZED - ): - failover = True + elif failover_on_unknown_endpoint and self._is_unknown_endpoint( + e, synapse_error + ): + failover = True if not failover: raise synapse_error from e @@ -573,9 +591,8 @@ class FederationClient(FederationBase): UnsupportedRoomVersionError: if remote responds with a room version we don't understand. - SynapseError: if the chosen remote server returns a 300/400 code. - - RuntimeError: if no servers were reachable. + SynapseError: if the chosen remote server returns a 300/400 code, or + no servers successfully handle the request. """ valid_memberships = {Membership.JOIN, Membership.LEAVE, Membership.KNOCK} if membership not in valid_memberships: @@ -645,9 +662,8 @@ class FederationClient(FederationBase): ``auth_chain``. Raises: - SynapseError: if the chosen remote server returns a 300/400 code. - - RuntimeError: if no servers were reachable. + SynapseError: if the chosen remote server returns a 300/400 code, or + no servers successfully handle the request. """ async def send_request(destination) -> Dict[str, Any]: @@ -676,7 +692,7 @@ class FederationClient(FederationBase): if create_event is None: # If the state doesn't have a create event then the room is # invalid, and it would fail auth checks anyway. - raise SynapseError(400, "No create event in state") + raise InvalidResponseError("No create event in state") # the room version should be sane. create_room_version = create_event.content.get( @@ -749,16 +765,11 @@ class FederationClient(FederationBase): content=pdu.get_pdu_json(time_now), ) except HttpResponseException as e: - if e.code in [400, 404]: - err = e.to_synapse_error() - - # If we receive an error response that isn't a generic error, or an - # unrecognised endpoint error, we assume that the remote understands - # the v2 invite API and this is a legitimate error. - if err.errcode not in [Codes.UNKNOWN, Codes.UNRECOGNIZED]: - raise err - else: - raise e.to_synapse_error() + # If an error is received that is due to an unrecognised endpoint, + # fallback to the v1 endpoint. Otherwise consider it a legitmate error + # and raise. 
+ if not self._is_unknown_endpoint(e): + raise logger.debug("Couldn't send_join with the v2 API, falling back to the v1 API") @@ -805,6 +816,11 @@ class FederationClient(FederationBase): Returns: The event as a dict as returned by the remote server + + Raises: + SynapseError: if the remote server returns an error or if the server + only supports the v1 endpoint and a room version other than "1" + or "2" is requested. """ time_now = self._clock.time_msec() @@ -820,28 +836,19 @@ class FederationClient(FederationBase): }, ) except HttpResponseException as e: - if e.code in [400, 404]: - err = e.to_synapse_error() - - # If we receive an error response that isn't a generic error, we - # assume that the remote understands the v2 invite API and this - # is a legitimate error. - if err.errcode != Codes.UNKNOWN: - raise err - - # Otherwise, we assume that the remote server doesn't understand - # the v2 invite API. That's ok provided the room uses old-style event - # IDs. + # If an error is received that is due to an unrecognised endpoint, + # fallback to the v1 endpoint if the room uses old-style event IDs. + # Otherwise consider it a legitmate error and raise. + err = e.to_synapse_error() + if self._is_unknown_endpoint(e, err): if room_version.event_format != EventFormatVersions.V1: raise SynapseError( 400, "User's homeserver does not support this room version", Codes.UNSUPPORTED_ROOM_VERSION, ) - elif e.code in (403, 429): - raise e.to_synapse_error() else: - raise + raise err # Didn't work, try v1 API. # Note the v1 API returns a tuple of `(200, content)` @@ -868,9 +875,8 @@ class FederationClient(FederationBase): pdu: event to be sent Raises: - SynapseError if the chosen remote server returns a 300/400 code. - - RuntimeError if no servers were reachable. + SynapseError: if the chosen remote server returns a 300/400 code, or + no servers successfully handle the request. """ async def send_request(destination: str) -> None: @@ -892,16 +898,11 @@ class FederationClient(FederationBase): content=pdu.get_pdu_json(time_now), ) except HttpResponseException as e: - if e.code in [400, 404]: - err = e.to_synapse_error() - - # If we receive an error response that isn't a generic error, or an - # unrecognised endpoint error, we assume that the remote understands - # the v2 invite API and this is a legitimate error. - if err.errcode not in [Codes.UNKNOWN, Codes.UNRECOGNIZED]: - raise err - else: - raise e.to_synapse_error() + # If an error is received that is due to an unrecognised endpoint, + # fallback to the v1 endpoint. Otherwise consider it a legitmate error + # and raise. 
+ if not self._is_unknown_endpoint(e): + raise logger.debug("Couldn't send_leave with the v2 API, falling back to the v1 API") diff --git a/synapse/federation/federation_server.py b/synapse/federation/federation_server.py index 794c1138a9..86f1f47ba7 100644 --- a/synapse/federation/federation_server.py +++ b/synapse/federation/federation_server.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2015, 2016 OpenMarket Ltd # Copyright 2018 New Vector Ltd # Copyright 2019 Matrix.org Federation C.I.C @@ -138,7 +137,7 @@ class FederationServer(FederationBase): ) # type: ResponseCache[Tuple[str, str]] self._federation_metrics_domains = ( - hs.get_config().federation.federation_metrics_domains + hs.config.federation.federation_metrics_domains ) async def on_backfill_request( diff --git a/synapse/federation/persistence.py b/synapse/federation/persistence.py index ce5fc758f0..2f9c9bc2cd 100644 --- a/synapse/federation/persistence.py +++ b/synapse/federation/persistence.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2014-2016 OpenMarket Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/federation/send_queue.py b/synapse/federation/send_queue.py index 0c18c49abb..65d76ea974 100644 --- a/synapse/federation/send_queue.py +++ b/synapse/federation/send_queue.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2014-2016 OpenMarket Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -77,9 +76,6 @@ class FederationRemoteSendQueue(AbstractFederationSender): # Pending presence map user_id -> UserPresenceState self.presence_map = {} # type: Dict[str, UserPresenceState] - # Stream position -> list[user_id] - self.presence_changed = SortedDict() # type: SortedDict[int, List[str]] - # Stores the destinations we need to explicitly send presence to about a # given user. # Stream position -> (user_id, destinations) @@ -97,7 +93,7 @@ class FederationRemoteSendQueue(AbstractFederationSender): self.edus = SortedDict() # type: SortedDict[int, Edu] - # stream ID for the next entry into presence_changed/keyed_edu_changed/edus. + # stream ID for the next entry into keyed_edu_changed/edus. self.pos = 1 # map from stream ID to the time that stream entry was generated, so that we @@ -118,7 +114,6 @@ class FederationRemoteSendQueue(AbstractFederationSender): for queue_name in [ "presence_map", - "presence_changed", "keyed_edu", "keyed_edu_changed", "edus", @@ -156,23 +151,12 @@ class FederationRemoteSendQueue(AbstractFederationSender): """Clear all the queues from before a given position""" with Measure(self.clock, "send_queue._clear"): # Delete things out of presence maps - keys = self.presence_changed.keys() - i = self.presence_changed.bisect_left(position_to_delete) - for key in keys[:i]: - del self.presence_changed[key] - - user_ids = { - user_id for uids in self.presence_changed.values() for user_id in uids - } - keys = self.presence_destinations.keys() i = self.presence_destinations.bisect_left(position_to_delete) for key in keys[:i]: del self.presence_destinations[key] - user_ids.update( - user_id for user_id, _ in self.presence_destinations.values() - ) + user_ids = {user_id for user_id, _ in self.presence_destinations.values()} to_del = [ user_id for user_id in self.presence_map if user_id not in user_ids @@ -245,23 +229,6 @@ class FederationRemoteSendQueue(AbstractFederationSender): """ # nothing to do here: the replication listener will handle it. 
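With `presence_changed` and `PresenceRow` removed in the `send_queue.py` hunks above, clearing old positions only has to trim `presence_destinations`. A small illustrative sketch of that trimming, using `sortedcontainers` directly and made-up users and destinations:

```python
from sortedcontainers import SortedDict

presence_map = {"@a:example.org": "online", "@b:example.org": "unavailable"}
presence_destinations = SortedDict({
    1: ("@a:example.org", ["remote1.example"]),
    5: ("@b:example.org", ["remote2.example"]),
})

def clear_before(position_to_delete: int) -> None:
    # Drop queued rows older than the given stream position...
    keys = presence_destinations.keys()
    i = presence_destinations.bisect_left(position_to_delete)
    for key in list(keys[:i]):
        del presence_destinations[key]
    # ...then drop cached presence for users no longer referenced by any row.
    user_ids = {user_id for user_id, _ in presence_destinations.values()}
    for user_id in [u for u in presence_map if u not in user_ids]:
        del presence_map[user_id]

clear_before(3)
print(dict(presence_destinations), presence_map)
```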
- def send_presence(self, states: List[UserPresenceState]) -> None: - """As per FederationSender - - Args: - states - """ - pos = self._next_pos() - - # We only want to send presence for our own users, so lets always just - # filter here just in case. - local_states = [s for s in states if self.is_mine_id(s.user_id)] - - self.presence_map.update({state.user_id: state for state in local_states}) - self.presence_changed[pos] = [state.user_id for state in local_states] - - self.notifier.on_new_replication_data() - def send_presence_to_destinations( self, states: Iterable[UserPresenceState], destinations: Iterable[str] ) -> None: @@ -326,18 +293,6 @@ class FederationRemoteSendQueue(AbstractFederationSender): # of the federation stream. rows = [] # type: List[Tuple[int, BaseFederationRow]] - # Fetch changed presence - i = self.presence_changed.bisect_right(from_token) - j = self.presence_changed.bisect_right(to_token) + 1 - dest_user_ids = [ - (pos, user_id) - for pos, user_id_list in self.presence_changed.items()[i:j] - for user_id in user_id_list - ] - - for (key, user_id) in dest_user_ids: - rows.append((key, PresenceRow(state=self.presence_map[user_id]))) - # Fetch presence to send to destinations i = self.presence_destinations.bisect_right(from_token) j = self.presence_destinations.bisect_right(to_token) + 1 @@ -428,22 +383,6 @@ class BaseFederationRow: raise NotImplementedError() -class PresenceRow( - BaseFederationRow, namedtuple("PresenceRow", ("state",)) # UserPresenceState -): - TypeId = "p" - - @staticmethod - def from_data(data): - return PresenceRow(state=UserPresenceState.from_dict(data)) - - def to_data(self): - return self.state.as_dict() - - def add_to_buffer(self, buff): - buff.presence.append(self.state) - - class PresenceDestinationsRow( BaseFederationRow, namedtuple( @@ -507,7 +446,6 @@ class EduRow(BaseFederationRow, namedtuple("EduRow", ("edu",))): # Edu _rowtypes = ( - PresenceRow, PresenceDestinationsRow, KeyedEduRow, EduRow, @@ -519,7 +457,6 @@ TypeToRow = {Row.TypeId: Row for Row in _rowtypes} ParsedFederationStreamData = namedtuple( "ParsedFederationStreamData", ( - "presence", # list(UserPresenceState) "presence_destinations", # list of tuples of UserPresenceState and destinations "keyed_edus", # dict of destination -> { key -> Edu } "edus", # dict of destination -> [Edu] @@ -544,7 +481,6 @@ def process_rows_for_federation( # them into the appropriate collection and then send them off. 
buff = ParsedFederationStreamData( - presence=[], presence_destinations=[], keyed_edus={}, edus={}, @@ -560,18 +496,15 @@ def process_rows_for_federation( parsed_row = RowType.from_data(row.data) parsed_row.add_to_buffer(buff) - if buff.presence: - transaction_queue.send_presence(buff.presence) - for state, destinations in buff.presence_destinations: transaction_queue.send_presence_to_destinations( states=[state], destinations=destinations ) - for destination, edu_map in buff.keyed_edus.items(): + for edu_map in buff.keyed_edus.values(): for key, edu in edu_map.items(): transaction_queue.send_edu(edu, key) - for destination, edu_list in buff.edus.items(): + for edu_list in buff.edus.values(): for edu in edu_list: transaction_queue.send_edu(edu, None) diff --git a/synapse/federation/sender/__init__.py b/synapse/federation/sender/__init__.py index d821dcbf6a..deb40f4610 100644 --- a/synapse/federation/sender/__init__.py +++ b/synapse/federation/sender/__init__.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2019 New Vector Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -27,12 +26,7 @@ from synapse.events import EventBase from synapse.federation.sender.per_destination_queue import PerDestinationQueue from synapse.federation.sender.transaction_manager import TransactionManager from synapse.federation.units import Edu -from synapse.handlers.presence import get_interested_remotes -from synapse.logging.context import ( - make_deferred_yieldable, - preserve_fn, - run_in_background, -) +from synapse.logging.context import make_deferred_yieldable, run_in_background from synapse.metrics import ( LaterGauge, event_processing_loop_counter, @@ -41,7 +35,7 @@ from synapse.metrics import ( ) from synapse.metrics.background_process_metrics import run_as_background_process from synapse.types import JsonDict, ReadReceipt, RoomStreamToken -from synapse.util.metrics import Measure, measure_func +from synapse.util.metrics import Measure if TYPE_CHECKING: from synapse.events.presence_router import PresenceRouter @@ -87,15 +81,6 @@ class AbstractFederationSender(metaclass=abc.ABCMeta): raise NotImplementedError() @abc.abstractmethod - def send_presence(self, states: List[UserPresenceState]) -> None: - """Send the new presence states to the appropriate destinations. - - This actually queues up the presence states ready for sending and - triggers a background task to process them and send out the transactions. - """ - raise NotImplementedError() - - @abc.abstractmethod def send_presence_to_destinations( self, states: Iterable[UserPresenceState], destinations: Iterable[str] ) -> None: @@ -183,11 +168,6 @@ class FederationSender(AbstractFederationSender): ), ) - # Map of user_id -> UserPresenceState for all the pending presence - # to be sent out by user_id. Entries here get processed and put in - # pending_presence_by_dest - self.pending_presence = {} # type: Dict[str, UserPresenceState] - LaterGauge( "synapse_federation_transaction_queue_pending_pdus", "", @@ -208,8 +188,6 @@ class FederationSender(AbstractFederationSender): self._is_processing = False self._last_poked_id = -1 - self._processing_pending_presence = False - # map from room_id to a set of PerDestinationQueues which we believe are # awaiting a call to flush_read_receipts_for_room. 
The presence of an entry # here for a given room means that we are rate-limiting RR flushes to that room, @@ -519,48 +497,6 @@ class FederationSender(AbstractFederationSender): for queue in queues: queue.flush_read_receipts_for_room(room_id) - @preserve_fn # the caller should not yield on this - async def send_presence(self, states: List[UserPresenceState]) -> None: - """Send the new presence states to the appropriate destinations. - - This actually queues up the presence states ready for sending and - triggers a background task to process them and send out the transactions. - """ - if not self.hs.config.use_presence: - # No-op if presence is disabled. - return - - # First we queue up the new presence by user ID, so multiple presence - # updates in quick succession are correctly handled. - # We only want to send presence for our own users, so lets always just - # filter here just in case. - self.pending_presence.update( - {state.user_id: state for state in states if self.is_mine_id(state.user_id)} - ) - - # We then handle the new pending presence in batches, first figuring - # out the destinations we need to send each state to and then poking it - # to attempt a new transaction. We linearize this so that we don't - # accidentally mess up the ordering and send multiple presence updates - # in the wrong order - if self._processing_pending_presence: - return - - self._processing_pending_presence = True - try: - while True: - states_map = self.pending_presence - self.pending_presence = {} - - if not states_map: - break - - await self._process_presence_inner(list(states_map.values())) - except Exception: - logger.exception("Error sending presence states to servers") - finally: - self._processing_pending_presence = False - def send_presence_to_destinations( self, states: Iterable[UserPresenceState], destinations: Iterable[str] ) -> None: @@ -572,6 +508,10 @@ class FederationSender(AbstractFederationSender): # No-op if presence is disabled. return + # Ensure we only send out presence states for local users. 
+ for state in states: + assert self.is_mine_id(state.user_id) + for destination in destinations: if destination == self.server_name: continue @@ -581,40 +521,6 @@ class FederationSender(AbstractFederationSender): continue self._get_per_destination_queue(destination).send_presence(states) - @measure_func("txnqueue._process_presence") - async def _process_presence_inner(self, states: List[UserPresenceState]) -> None: - """Given a list of states populate self.pending_presence_by_dest and - poke to send a new transaction to each destination - """ - # We pull the presence router here instead of __init__ - # to prevent a dependency cycle: - # - # AuthHandler -> Notifier -> FederationSender - # -> PresenceRouter -> ModuleApi -> AuthHandler - if self._presence_router is None: - self._presence_router = self.hs.get_presence_router() - - assert self._presence_router is not None - - hosts_and_states = await get_interested_remotes( - self.store, - self._presence_router, - states, - self.state, - ) - - for destinations, states in hosts_and_states: - for destination in destinations: - if destination == self.server_name: - continue - - if not self._federation_shard_config.should_handle( - self._instance_name, destination - ): - continue - - self._get_per_destination_queue(destination).send_presence(states) - def build_and_send_edu( self, destination: str, diff --git a/synapse/federation/sender/per_destination_queue.py b/synapse/federation/sender/per_destination_queue.py index e9c8a9f20a..3b053ebcfb 100644 --- a/synapse/federation/sender/per_destination_queue.py +++ b/synapse/federation/sender/per_destination_queue.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2014-2016 OpenMarket Ltd # Copyright 2019 New Vector Ltd # diff --git a/synapse/federation/sender/transaction_manager.py b/synapse/federation/sender/transaction_manager.py index 07b740c2f2..72a635830b 100644 --- a/synapse/federation/sender/transaction_manager.py +++ b/synapse/federation/sender/transaction_manager.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2019 New Vector Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -57,7 +56,7 @@ class TransactionManager: self._transport_layer = hs.get_federation_transport_client() self._federation_metrics_domains = ( - hs.get_config().federation.federation_metrics_domains + hs.config.federation.federation_metrics_domains ) # HACK to get unique tx id diff --git a/synapse/federation/transport/__init__.py b/synapse/federation/transport/__init__.py index 5db733af98..3c9a0f6944 100644 --- a/synapse/federation/transport/__init__.py +++ b/synapse/federation/transport/__init__.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2014-2016 OpenMarket Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/federation/transport/client.py b/synapse/federation/transport/client.py index df7e9fbbc2..f842bfeb0d 100644 --- a/synapse/federation/transport/client.py +++ b/synapse/federation/transport/client.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2014-2016 OpenMarket Ltd # Copyright 2018 New Vector Ltd # Copyright 2020 Sorunome diff --git a/synapse/federation/transport/server.py b/synapse/federation/transport/server.py index c616de5f22..4d8542f8df 100644 --- a/synapse/federation/transport/server.py +++ b/synapse/federation/transport/server.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2014-2016 OpenMarket Ltd # Copyright 2018 New Vector Ltd # Copyright 2019-2020 The Matrix.org Foundation C.I.C. 
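After the removal of the internal presence queueing above, `send_presence_to_destinations` is the single remaining send path, and it now asserts that callers only hand it presence for local users. A simplified, self-contained sketch of that fan-out; the server name, user-ID check, and queue structure are stand-ins:

```python
from typing import Dict, Iterable, List

SERVER_NAME = "example.org"

def is_mine_id(user_id: str) -> bool:
    return user_id.endswith(":" + SERVER_NAME)

def send_presence_to_destinations(
    states: List[dict],
    destinations: Iterable[str],
    queues: Dict[str, List[dict]],
) -> None:
    # Callers are now responsible for only passing presence of local users.
    for state in states:
        assert is_mine_id(state["user_id"])
    for destination in destinations:
        if destination == SERVER_NAME:
            continue  # never federate presence back to ourselves
        queues.setdefault(destination, []).extend(states)

queues: Dict[str, List[dict]] = {}
send_presence_to_destinations(
    [{"user_id": "@alice:example.org", "presence": "online"}],
    ["remote1.example", "example.org"],
    queues,
)
print(queues)
```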
diff --git a/synapse/federation/units.py b/synapse/federation/units.py index 0f8bf000ac..c83a261918 100644 --- a/synapse/federation/units.py +++ b/synapse/federation/units.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2014-2016 OpenMarket Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/groups/attestations.py b/synapse/groups/attestations.py index 368c44708d..d2fc8be5f5 100644 --- a/synapse/groups/attestations.py +++ b/synapse/groups/attestations.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2017 Vector Creations Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/groups/groups_server.py b/synapse/groups/groups_server.py index 4b16a4ac29..a06d060ebf 100644 --- a/synapse/groups/groups_server.py +++ b/synapse/groups/groups_server.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2017 Vector Creations Ltd # Copyright 2018 New Vector Ltd # Copyright 2019 Michael Telatynski <7t3chguy@gmail.com> diff --git a/synapse/handlers/__init__.py b/synapse/handlers/__init__.py index bfebb0f644..5e83dba2ed 100644 --- a/synapse/handlers/__init__.py +++ b/synapse/handlers/__init__.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2014-2016 OpenMarket Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/handlers/_base.py b/synapse/handlers/_base.py index fb899aa90d..d800e16912 100644 --- a/synapse/handlers/_base.py +++ b/synapse/handlers/_base.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2014 - 2016 OpenMarket Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/handlers/account_data.py b/synapse/handlers/account_data.py index 1ce6d697ed..affb54e0ee 100644 --- a/synapse/handlers/account_data.py +++ b/synapse/handlers/account_data.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2015, 2016 OpenMarket Ltd # Copyright 2021 The Matrix.org Foundation C.I.C. # diff --git a/synapse/handlers/account_validity.py b/synapse/handlers/account_validity.py index 125902ac17..022789ea5f 100644 --- a/synapse/handlers/account_validity.py +++ b/synapse/handlers/account_validity.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2019 New Vector Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -43,38 +42,47 @@ class AccountValidityHandler: self.sendmail = self.hs.get_sendmail() self.clock = self.hs.get_clock() - self._account_validity_enabled = self.hs.config.account_validity_enabled + self._account_validity_enabled = ( + hs.config.account_validity.account_validity_enabled + ) self._account_validity_renew_by_email_enabled = ( - self.hs.config.account_validity_renew_by_email_enabled + hs.config.account_validity.account_validity_renew_by_email_enabled ) + self._show_users_in_user_directory = self.hs.config.show_users_in_user_directory self.profile_handler = self.hs.get_profile_handler() self._account_validity_period = None if self._account_validity_enabled: - self._account_validity_period = self.hs.config.account_validity_period + self._account_validity_period = ( + hs.config.account_validity.account_validity_period + ) if ( self._account_validity_enabled and self._account_validity_renew_by_email_enabled ): # Don't do email-specific configuration if renewal by email is disabled. 
- self._template_html = self.config.account_validity_template_html - self._template_text = self.config.account_validity_template_text + self._template_html = ( + hs.config.account_validity.account_validity_template_html + ) + self._template_text = ( + hs.config.account_validity.account_validity_template_text + ) account_validity_renew_email_subject = ( - self.hs.config.account_validity_renew_email_subject + hs.config.account_validity.account_validity_renew_email_subject ) try: - app_name = self.hs.config.email_app_name + app_name = hs.config.email_app_name self._subject = account_validity_renew_email_subject % {"app": app_name} - self._from_string = self.hs.config.email_notif_from % {"app": app_name} + self._from_string = hs.config.email_notif_from % {"app": app_name} except Exception: # If substitution failed, fall back to the bare strings. self._subject = account_validity_renew_email_subject - self._from_string = self.hs.config.email_notif_from + self._from_string = hs.config.email_notif_from self._raw_from = email.utils.parseaddr(self._from_string)[1] diff --git a/synapse/handlers/acme.py b/synapse/handlers/acme.py index 2a25af6288..16ab93f580 100644 --- a/synapse/handlers/acme.py +++ b/synapse/handlers/acme.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2019 New Vector Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/handlers/acme_issuing_service.py b/synapse/handlers/acme_issuing_service.py index ae2a9dd9c2..a972d3fa0a 100644 --- a/synapse/handlers/acme_issuing_service.py +++ b/synapse/handlers/acme_issuing_service.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2019 New Vector Ltd # Copyright 2019 The Matrix.org Foundation C.I.C. # diff --git a/synapse/handlers/admin.py b/synapse/handlers/admin.py index c494de49a3..f72ded038e 100644 --- a/synapse/handlers/admin.py +++ b/synapse/handlers/admin.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2014-2016 OpenMarket Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/handlers/appservice.py b/synapse/handlers/appservice.py index 9fb7ee335d..177310f0be 100644 --- a/synapse/handlers/appservice.py +++ b/synapse/handlers/appservice.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2015, 2016 OpenMarket Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -13,7 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. import logging -from typing import TYPE_CHECKING, Dict, List, Optional, Union +from typing import TYPE_CHECKING, Collection, Dict, List, Optional, Union from prometheus_client import Counter @@ -34,7 +33,7 @@ from synapse.metrics.background_process_metrics import ( wrap_as_background_process, ) from synapse.storage.databases.main.directory import RoomAliasMapping -from synapse.types import Collection, JsonDict, RoomAlias, RoomStreamToken, UserID +from synapse.types import JsonDict, RoomAlias, RoomStreamToken, UserID from synapse.util.metrics import Measure if TYPE_CHECKING: diff --git a/synapse/handlers/auth.py b/synapse/handlers/auth.py index 08e413bc98..36f2450e2e 100644 --- a/synapse/handlers/auth.py +++ b/synapse/handlers/auth.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2014 - 2016 OpenMarket Ltd # Copyright 2017 Vector Creations Ltd # Copyright 2019 - 2020 The Matrix.org Foundation C.I.C. 
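The account-validity hunk above builds the renewal email subject and `From:` string with `%(app)s`-style substitution and falls back to the raw configured strings if substitution fails. A tiny standalone illustration of that fallback, with example values:

```python
renew_email_subject = "Renew your %(app)s account"
notif_from = "%(app)s <noreply@example.org>"
app_name = "My Matrix Server"

try:
    subject = renew_email_subject % {"app": app_name}
    from_string = notif_from % {"app": app_name}
except Exception:
    # Substitution failed (e.g. a stray '%' or unknown placeholder);
    # fall back to the raw configured strings.
    subject = renew_email_subject
    from_string = notif_from

print(subject, "|", from_string)
```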
@@ -1249,7 +1248,7 @@ class AuthHandler(BaseHandler): # see if any of our auth providers want to know about this for provider in self.password_providers: - for token, token_id, device_id in tokens_and_devices: + for token, _, device_id in tokens_and_devices: await provider.on_logged_out( user_id=user_id, device_id=device_id, access_token=token ) diff --git a/synapse/handlers/cas_handler.py b/synapse/handlers/cas.py index 5060936f94..7346ccfe93 100644 --- a/synapse/handlers/cas_handler.py +++ b/synapse/handlers/cas.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2020 The Matrix.org Foundation C.I.C. # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/handlers/deactivate_account.py b/synapse/handlers/deactivate_account.py index 9167918257..d61da72efc 100644 --- a/synapse/handlers/deactivate_account.py +++ b/synapse/handlers/deactivate_account.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2017, 2018 New Vector Ltd # Copyright 2019 The Matrix.org Foundation C.I.C. # @@ -50,7 +49,9 @@ class DeactivateAccountHandler(BaseHandler): if hs.config.run_background_tasks: hs.get_reactor().callWhenRunning(self._start_user_parting) - self._account_validity_enabled = hs.config.account_validity_enabled + self._account_validity_enabled = ( + hs.config.account_validity.account_validity_enabled + ) async def deactivate_account( self, diff --git a/synapse/handlers/device.py b/synapse/handlers/device.py index 7e76db3e2a..95bdc5902a 100644 --- a/synapse/handlers/device.py +++ b/synapse/handlers/device.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2016 OpenMarket Ltd # Copyright 2019 New Vector Ltd # Copyright 2019,2020 The Matrix.org Foundation C.I.C. @@ -15,7 +14,7 @@ # See the License for the specific language governing permissions and # limitations under the License. import logging -from typing import TYPE_CHECKING, Dict, Iterable, List, Optional, Set, Tuple +from typing import TYPE_CHECKING, Collection, Dict, Iterable, List, Optional, Set, Tuple from synapse.api import errors from synapse.api.constants import EventTypes @@ -29,7 +28,6 @@ from synapse.api.errors import ( from synapse.logging.opentracing import log_kv, set_tag, trace from synapse.metrics.background_process_metrics import run_as_background_process from synapse.types import ( - Collection, JsonDict, StreamToken, UserID, @@ -157,8 +155,7 @@ class DeviceWorkerHandler(BaseHandler): # The user may have left the room # TODO: Check if they actually did or if we were just invited. 
if room_id not in room_ids: - for key, event_id in current_state_ids.items(): - etype, state_key = key + for etype, state_key in current_state_ids.keys(): if etype != EventTypes.Member: continue possibly_left.add(state_key) @@ -180,8 +177,7 @@ class DeviceWorkerHandler(BaseHandler): log_kv( {"event": "encountered empty previous state", "room_id": room_id} ) - for key, event_id in current_state_ids.items(): - etype, state_key = key + for etype, state_key in current_state_ids.keys(): if etype != EventTypes.Member: continue possibly_changed.add(state_key) @@ -199,8 +195,7 @@ class DeviceWorkerHandler(BaseHandler): for state_dict in prev_state_ids.values(): member_event = state_dict.get((EventTypes.Member, user_id), None) if not member_event or member_event != current_member_id: - for key, event_id in current_state_ids.items(): - etype, state_key = key + for etype, state_key in current_state_ids.keys(): if etype != EventTypes.Member: continue possibly_changed.add(state_key) @@ -715,7 +710,7 @@ class DeviceListUpdater: # This can happen since we batch updates return - for device_id, stream_id, prev_ids, content in pending_updates: + for device_id, stream_id, prev_ids, _ in pending_updates: logger.debug( "Handling update %r/%r, ID: %r, prev: %r ", user_id, @@ -741,7 +736,7 @@ class DeviceListUpdater: else: # Simply update the single device, since we know that is the only # change (because of the single prev_id matching the current cache) - for device_id, stream_id, prev_ids, content in pending_updates: + for device_id, stream_id, _, content in pending_updates: await self.store.update_remote_device_list_cache_entry( user_id, device_id, content, stream_id ) @@ -930,6 +925,10 @@ class DeviceListUpdater: else: cached_devices = await self.store.get_cached_devices_for_user(user_id) if cached_devices == {d["device_id"]: d for d in devices}: + logging.info( + "Skipping device list resync for %s, as our cache matches already", + user_id, + ) devices = [] ignore_devices = True @@ -945,6 +944,9 @@ class DeviceListUpdater: await self.store.update_remote_device_list_cache( user_id, devices, stream_id ) + # mark the cache as valid, whether or not we actually processed any device + # list updates. + await self.store.mark_remote_user_device_cache_as_valid(user_id) device_ids = [device["device_id"] for device in devices] # Handle cross-signing keys. diff --git a/synapse/handlers/devicemessage.py b/synapse/handlers/devicemessage.py index c971eeb4d2..c5d631de07 100644 --- a/synapse/handlers/devicemessage.py +++ b/synapse/handlers/devicemessage.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2016 OpenMarket Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/handlers/directory.py b/synapse/handlers/directory.py index abcf86352d..90932316f3 100644 --- a/synapse/handlers/directory.py +++ b/synapse/handlers/directory.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2014-2016 OpenMarket Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/handlers/e2e_keys.py b/synapse/handlers/e2e_keys.py index 92b18378fc..974487800d 100644 --- a/synapse/handlers/e2e_keys.py +++ b/synapse/handlers/e2e_keys.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2016 OpenMarket Ltd # Copyright 2018-2019 New Vector Ltd # Copyright 2019 The Matrix.org Foundation C.I.C. 
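The `device.py` hunk above makes a device-list resync mark the remote user's cache as valid even when the fetched list matches what we already had, so an already up-to-date resync is not retried indefinitely. A rough sketch of that flow, with an in-memory stand-in for the datastore methods named in the diff:

```python
class FakeDeviceStore:
    def __init__(self):
        self.cache = {}        # user_id -> {device_id: device}
        self.cache_valid = {}  # user_id -> bool

    def get_cached_devices_for_user(self, user_id):
        return self.cache.get(user_id, {})

    def update_remote_device_list_cache(self, user_id, devices, stream_id):
        self.cache[user_id] = {d["device_id"]: d for d in devices}

    def mark_remote_user_device_cache_as_valid(self, user_id):
        self.cache_valid[user_id] = True

def apply_resync(store, user_id, devices, stream_id):
    if store.get_cached_devices_for_user(user_id) == {d["device_id"]: d for d in devices}:
        devices = []  # nothing changed remotely; don't rewrite the cache
    else:
        store.update_remote_device_list_cache(user_id, devices, stream_id)
    # Whether or not anything was written, the cache now matches the remote.
    store.mark_remote_user_device_cache_as_valid(user_id)

store = FakeDeviceStore()
apply_resync(store, "@bob:remote.example", [{"device_id": "DEV1"}], stream_id=7)
print(store.cache, store.cache_valid)
```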
diff --git a/synapse/handlers/e2e_room_keys.py b/synapse/handlers/e2e_room_keys.py index a910d246d6..31742236a9 100644 --- a/synapse/handlers/e2e_room_keys.py +++ b/synapse/handlers/e2e_room_keys.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2017, 2018 New Vector Ltd # Copyright 2019 Matrix.org Foundation C.I.C. # diff --git a/synapse/handlers/event_auth.py b/synapse/handlers/event_auth.py new file mode 100644 index 0000000000..eff639f407 --- /dev/null +++ b/synapse/handlers/event_auth.py @@ -0,0 +1,86 @@ +# Copyright 2021 The Matrix.org Foundation C.I.C. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +from typing import TYPE_CHECKING + +from synapse.api.constants import EventTypes, JoinRules +from synapse.api.room_versions import RoomVersion +from synapse.types import StateMap + +if TYPE_CHECKING: + from synapse.server import HomeServer + + +class EventAuthHandler: + """ + This class contains methods for authenticating events added to room graphs. + """ + + def __init__(self, hs: "HomeServer"): + self._store = hs.get_datastore() + + async def can_join_without_invite( + self, state_ids: StateMap[str], room_version: RoomVersion, user_id: str + ) -> bool: + """ + Check whether a user can join a room without an invite. + + When joining a room with restricted joined rules (as defined in MSC3083), + the membership of spaces must be checked during join. + + Args: + state_ids: The state of the room as it currently is. + room_version: The room version of the room being joined. + user_id: The user joining the room. + + Returns: + True if the user can join the room, false otherwise. + """ + # This only applies to room versions which support the new join rule. + if not room_version.msc3083_join_rules: + return True + + # If there's no join rule, then it defaults to invite (so this doesn't apply). + join_rules_event_id = state_ids.get((EventTypes.JoinRules, ""), None) + if not join_rules_event_id: + return True + + # If the join rule is not restricted, this doesn't apply. + join_rules_event = await self._store.get_event(join_rules_event_id) + if join_rules_event.content.get("join_rule") != JoinRules.MSC3083_RESTRICTED: + return True + + # If allowed is of the wrong form, then only allow invited users. + allowed_spaces = join_rules_event.content.get("allow", []) + if not isinstance(allowed_spaces, list): + return False + + # Get the list of joined rooms and see if there's an overlap. + joined_rooms = await self._store.get_rooms_for_user(user_id) + + # Pull out the other room IDs, invalid data gets filtered. + for space in allowed_spaces: + if not isinstance(space, dict): + continue + + space_id = space.get("space") + if not isinstance(space_id, str): + continue + + # The user was joined to one of the spaces specified, they can join + # this room! + if space_id in joined_rooms: + return True + + # The user was not in any of the required spaces. 
+ return False diff --git a/synapse/handlers/events.py b/synapse/handlers/events.py index f46cab7325..d82144d7fa 100644 --- a/synapse/handlers/events.py +++ b/synapse/handlers/events.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2014-2016 OpenMarket Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/handlers/federation.py b/synapse/handlers/federation.py index 97e347c30b..b59e17ad4e 100644 --- a/synapse/handlers/federation.py +++ b/synapse/handlers/federation.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2014-2016 OpenMarket Ltd # Copyright 2017-2018 New Vector Ltd # Copyright 2019-2020 The Matrix.org Foundation C.I.C. @@ -105,7 +104,7 @@ logger = logging.getLogger(__name__) @attr.s(slots=True) class _NewEventInfo: - """Holds information about a received event, ready for passing to _handle_new_events + """Holds information about a received event, ready for passing to _auth_and_persist_events Attributes: event: the received event @@ -148,6 +147,7 @@ class FederationHandler(BaseHandler): self.is_mine_id = hs.is_mine_id self.spam_checker = hs.get_spam_checker() self.event_creation_handler = hs.get_event_creation_handler() + self._event_auth_handler = hs.get_event_auth_handler() self._message_handler = hs.get_message_handler() self._server_notices_mxid = hs.config.server_notices_mxid self.config = hs.config @@ -810,7 +810,10 @@ class FederationHandler(BaseHandler): logger.debug("Processing event: %s", event) try: - await self._handle_new_event(origin, event, state=state) + context = await self.state_handler.compute_event_context( + event, old_state=state + ) + await self._auth_and_persist_event(origin, event, context, state=state) except AuthError as e: raise FederationError("ERROR", e.code, e.msg, affected=event.event_id) @@ -1013,7 +1016,9 @@ class FederationHandler(BaseHandler): ) if ev_infos: - await self._handle_new_events(dest, room_id, ev_infos, backfilled=True) + await self._auth_and_persist_events( + dest, room_id, ev_infos, backfilled=True + ) # Step 2: Persist the rest of the events in the chunk one by one events.sort(key=lambda e: e.depth) @@ -1026,10 +1031,12 @@ class FederationHandler(BaseHandler): # non-outliers assert not event.internal_metadata.is_outlier() + context = await self.state_handler.compute_event_context(event) + # We store these one at a time since each event depends on the # previous to work out the state. # TODO: We can probably do something more clever here. - await self._handle_new_event(dest, event, backfilled=True) + await self._auth_and_persist_event(dest, event, context, backfilled=True) return events @@ -1363,7 +1370,7 @@ class FederationHandler(BaseHandler): event_infos.append(_NewEventInfo(event, None, auth)) - await self._handle_new_events( + await self._auth_and_persist_events( destination, room_id, event_infos, @@ -1740,16 +1747,47 @@ class FederationHandler(BaseHandler): # would introduce the danger of backwards-compatibility problems. event.internal_metadata.send_on_behalf_of = origin - context = await self._handle_new_event(origin, event) + # Calculate the event context. + context = await self.state_handler.compute_event_context(event) + + # Get the state before the new event. + prev_state_ids = await context.get_prev_state_ids() + + # Check if the user is already in the room or invited to the room. 
+ user_id = event.state_key + prev_member_event_id = prev_state_ids.get((EventTypes.Member, user_id), None) + newly_joined = True + user_is_invited = False + if prev_member_event_id: + prev_member_event = await self.store.get_event(prev_member_event_id) + newly_joined = prev_member_event.membership != Membership.JOIN + user_is_invited = prev_member_event.membership == Membership.INVITE + + # If the member is not already in the room, and not invited, check if + # they should be allowed access via membership in a space. + if ( + newly_joined + and not user_is_invited + and not await self._event_auth_handler.can_join_without_invite( + prev_state_ids, + event.room_version, + user_id, + ) + ): + raise AuthError( + 403, + "You do not belong to any of the required spaces to join this room.", + ) + + # Persist the event. + await self._auth_and_persist_event(origin, event, context) logger.debug( - "on_send_join_request: After _handle_new_event: %s, sigs: %s", + "on_send_join_request: After _auth_and_persist_event: %s, sigs: %s", event.event_id, event.signatures, ) - prev_state_ids = await context.get_prev_state_ids() - state_ids = list(prev_state_ids.values()) auth_chain = await self.store.get_auth_chain(event.room_id, state_ids) @@ -1959,10 +1997,11 @@ class FederationHandler(BaseHandler): event.internal_metadata.outlier = False - await self._handle_new_event(origin, event) + context = await self.state_handler.compute_event_context(event) + await self._auth_and_persist_event(origin, event, context) logger.debug( - "on_send_leave_request: After _handle_new_event: %s, sigs: %s", + "on_send_leave_request: After _auth_and_persist_event: %s, sigs: %s", event.event_id, event.signatures, ) @@ -2184,16 +2223,44 @@ class FederationHandler(BaseHandler): async def get_min_depth_for_context(self, context: str) -> int: return await self.store.get_min_depth(context) - async def _handle_new_event( + async def _auth_and_persist_event( self, origin: str, event: EventBase, + context: EventContext, state: Optional[Iterable[EventBase]] = None, auth_events: Optional[MutableStateMap[EventBase]] = None, backfilled: bool = False, - ) -> EventContext: - context = await self._prep_event( - origin, event, state=state, auth_events=auth_events, backfilled=backfilled + ) -> None: + """ + Process an event by performing auth checks and then persisting to the database. + + Args: + origin: The host the event originates from. + event: The event itself. + context: + The event context. + + NB that this function potentially modifies it. + state: + The state events used to check the event for soft-fail. If this is + not provided the current state events will be used. + auth_events: + Map from (event_type, state_key) to event + + Normally, our calculated auth_events based on the state of the room + at the event's position in the DAG, though occasionally (eg if the + event is an outlier), may be the auth events claimed by the remote + server. + backfilled: True if the event was backfilled. 
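The send-join hunk above derives `newly_joined` and `user_is_invited` from the member's previous state before deciding whether to run the space check. Roughly, over plain membership strings (sample data is my own):

    # prev_membership is the membership in the state before the join event, or
    # None if the user had no member event at all.
    def classify(prev_membership):
        newly_joined = prev_membership != "join"
        user_is_invited = prev_membership == "invite"
        return newly_joined, user_is_invited

    assert classify(None) == (True, False)      # never in the room: restricted check applies
    assert classify("invite") == (True, True)   # invited: allowed without the space check
    assert classify("join") == (False, False)   # already joined: nothing to enforce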
+ """ + context = await self._check_event_auth( + origin, + event, + context, + state=state, + auth_events=auth_events, + backfilled=backfilled, ) try: @@ -2215,9 +2282,7 @@ class FederationHandler(BaseHandler): ) raise - return context - - async def _handle_new_events( + async def _auth_and_persist_events( self, origin: str, room_id: str, @@ -2235,9 +2300,13 @@ class FederationHandler(BaseHandler): async def prep(ev_info: _NewEventInfo): event = ev_info.event with nested_logging_context(suffix=event.event_id): - res = await self._prep_event( + res = await self.state_handler.compute_event_context( + event, old_state=ev_info.state + ) + res = await self._check_event_auth( origin, event, + res, state=ev_info.state, auth_events=ev_info.auth_events, backfilled=backfilled, @@ -2372,49 +2441,6 @@ class FederationHandler(BaseHandler): room_id, [(event, new_event_context)] ) - async def _prep_event( - self, - origin: str, - event: EventBase, - state: Optional[Iterable[EventBase]], - auth_events: Optional[MutableStateMap[EventBase]], - backfilled: bool, - ) -> EventContext: - context = await self.state_handler.compute_event_context(event, old_state=state) - - if not auth_events: - prev_state_ids = await context.get_prev_state_ids() - auth_events_ids = self.auth.compute_auth_events( - event, prev_state_ids, for_verification=True - ) - auth_events_x = await self.store.get_events(auth_events_ids) - auth_events = {(e.type, e.state_key): e for e in auth_events_x.values()} - - # This is a hack to fix some old rooms where the initial join event - # didn't reference the create event in its auth events. - if event.type == EventTypes.Member and not event.auth_event_ids(): - if len(event.prev_event_ids()) == 1 and event.depth < 5: - c = await self.store.get_event( - event.prev_event_ids()[0], allow_none=True - ) - if c and c.type == EventTypes.Create: - auth_events[(c.type, c.state_key)] = c - - context = await self.do_auth(origin, event, context, auth_events=auth_events) - - if not context.rejected: - await self._check_for_soft_fail(event, state, backfilled) - - if event.type == EventTypes.GuestAccess and not context.rejected: - await self.maybe_kick_guest_users(event) - - # If we are going to send this event over federation we precaclculate - # the joined hosts. - if event.internal_metadata.get_send_on_behalf_of(): - await self.event_creation_handler.cache_joined_hosts_for_event(event) - - return context - async def _check_for_soft_fail( self, event: EventBase, state: Optional[Iterable[EventBase]], backfilled: bool ) -> None: @@ -2525,19 +2551,28 @@ class FederationHandler(BaseHandler): return missing_events - async def do_auth( + async def _check_event_auth( self, origin: str, event: EventBase, context: EventContext, - auth_events: MutableStateMap[EventBase], + state: Optional[Iterable[EventBase]], + auth_events: Optional[MutableStateMap[EventBase]], + backfilled: bool, ) -> EventContext: """ + Checks whether an event should be rejected (for failing auth checks). Args: - origin: - event: + origin: The host the event originates from. + event: The event itself. context: + The event context. + + NB that this function potentially modifies it. + state: + The state events used to check the event for soft-fail. If this is + not provided the current state events will be used. auth_events: Map from (event_type, state_key) to event @@ -2547,12 +2582,34 @@ class FederationHandler(BaseHandler): server. Also NB that this function adds entries to it. 
+ + If this is not provided, it is calculated from the previous state IDs. + backfilled: True if the event was backfilled. + Returns: - updated context object + The updated context object. """ room_version = await self.store.get_room_version_id(event.room_id) room_version_obj = KNOWN_ROOM_VERSIONS[room_version] + if not auth_events: + prev_state_ids = await context.get_prev_state_ids() + auth_events_ids = self.auth.compute_auth_events( + event, prev_state_ids, for_verification=True + ) + auth_events_x = await self.store.get_events(auth_events_ids) + auth_events = {(e.type, e.state_key): e for e in auth_events_x.values()} + + # This is a hack to fix some old rooms where the initial join event + # didn't reference the create event in its auth events. + if event.type == EventTypes.Member and not event.auth_event_ids(): + if len(event.prev_event_ids()) == 1 and event.depth < 5: + c = await self.store.get_event( + event.prev_event_ids()[0], allow_none=True + ) + if c and c.type == EventTypes.Create: + auth_events[(c.type, c.state_key)] = c + try: context = await self._update_auth_events_and_context_for_auth( origin, event, context, auth_events @@ -2574,6 +2631,17 @@ class FederationHandler(BaseHandler): logger.warning("Failed auth resolution for %r because %s", event, e) context.rejected = RejectedReason.AUTH_ERROR + if not context.rejected: + await self._check_for_soft_fail(event, state, backfilled) + + if event.type == EventTypes.GuestAccess and not context.rejected: + await self.maybe_kick_guest_users(event) + + # If we are going to send this event over federation we precaclculate + # the joined hosts. + if event.internal_metadata.get_send_on_behalf_of(): + await self.event_creation_handler.cache_joined_hosts_for_event(event) + return context async def _update_auth_events_and_context_for_auth( @@ -2583,7 +2651,7 @@ class FederationHandler(BaseHandler): context: EventContext, auth_events: MutableStateMap[EventBase], ) -> EventContext: - """Helper for do_auth. See there for docs. + """Helper for _check_event_auth. See there for docs. Checks whether a given event has the expected auth events. 
If it doesn't then we talk to the remote server to compare state to see if @@ -2663,9 +2731,14 @@ class FederationHandler(BaseHandler): e.internal_metadata.outlier = True logger.debug( - "do_auth %s missing_auth: %s", event.event_id, e.event_id + "_check_event_auth %s missing_auth: %s", + event.event_id, + e.event_id, + ) + context = await self.state_handler.compute_event_context(e) + await self._auth_and_persist_event( + origin, e, context, auth_events=auth ) - await self._handle_new_event(origin, e, auth_events=auth) if e.event_id in event_auth_events: auth_events[(e.type, e.state_key)] = e @@ -3103,7 +3176,7 @@ class FederationHandler(BaseHandler): try: # for each sig on the third_party_invite block of the actual invite for server, signature_block in signed["signatures"].items(): - for key_name, encoded_signature in signature_block.items(): + for key_name in signature_block.keys(): if not key_name.startswith("ed25519:"): continue diff --git a/synapse/handlers/groups_local.py b/synapse/handlers/groups_local.py index a41ca5df9c..157f2ff218 100644 --- a/synapse/handlers/groups_local.py +++ b/synapse/handlers/groups_local.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2017 Vector Creations Ltd # Copyright 2018 New Vector Ltd # diff --git a/synapse/handlers/identity.py b/synapse/handlers/identity.py index 88100bc7da..3d686df04c 100644 --- a/synapse/handlers/identity.py +++ b/synapse/handlers/identity.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2015, 2016 OpenMarket Ltd # Copyright 2017 Vector Creations Ltd # Copyright 2018, 2019 New Vector Ltd @@ -16,7 +15,6 @@ # limitations under the License. """Utilities for interacting with Identity Servers""" - import logging import urllib.parse from typing import Awaitable, Callable, Dict, List, Optional, Tuple @@ -37,7 +35,11 @@ from synapse.http.site import SynapseRequest from synapse.types import JsonDict, Requester from synapse.util import json_decoder from synapse.util.hash import sha256_and_url_safe_base64 -from synapse.util.stringutils import assert_valid_client_secret, random_string +from synapse.util.stringutils import ( + assert_valid_client_secret, + random_string, + valid_id_server_location, +) from ._base import BaseHandler @@ -183,6 +185,11 @@ class IdentityHandler(BaseHandler): server with, if necessary. Required if use_v2 is true use_v2: Whether to use v2 Identity Service API endpoints. Defaults to True + Raises: + SynapseError: On any of the following conditions + - the supplied id_server is not a valid identity server name + - we failed to contact the supplied identity server + Returns: The response from the identity server """ @@ -197,6 +204,12 @@ class IdentityHandler(BaseHandler): # storing in the database). 
id_server_url = self.rewrite_id_server_url(id_server, add_https=True) + if not valid_id_server_location(id_server_url): + raise SynapseError( + 400, + "id_server must be a valid hostname with optional port and path components", + ) + # Decide which API endpoint URLs to use headers = {} bind_data = {"sid": sid, "client_secret": client_secret, "mxid": mxid} @@ -285,12 +298,24 @@ class IdentityHandler(BaseHandler): id_server: Identity server to unbind from Raises: - SynapseError: If we failed to contact the identity server + SynapseError: On any of the following conditions + - the supplied id_server is not a valid identity server name + - we failed to contact the supplied identity server Returns: True on success, otherwise False if the identity server doesn't support unbinding """ + + if not valid_id_server_location(id_server): + raise SynapseError( + 400, + "id_server must be a valid hostname with optional port and path components", + ) + + url = "https://%s/_matrix/identity/api/v1/3pid/unbind" % (id_server,) + url_bytes = "/_matrix/identity/api/v1/3pid/unbind".encode("ascii") + content = { "mxid": mxid, "threepid": {"medium": threepid["medium"], "address": threepid["address"]}, diff --git a/synapse/handlers/initial_sync.py b/synapse/handlers/initial_sync.py index 13f8152283..76242865ae 100644 --- a/synapse/handlers/initial_sync.py +++ b/synapse/handlers/initial_sync.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2016 OpenMarket Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/handlers/message.py b/synapse/handlers/message.py index bfff43c802..02db753ed6 100644 --- a/synapse/handlers/message.py +++ b/synapse/handlers/message.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2014-2016 OpenMarket Ltd # Copyright 2017-2018 New Vector Ltd # Copyright 2019-2020 The Matrix.org Foundation C.I.C. diff --git a/synapse/handlers/oidc_handler.py b/synapse/handlers/oidc.py index 6624212d6f..ee6e41c0e4 100644 --- a/synapse/handlers/oidc_handler.py +++ b/synapse/handlers/oidc.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2020 Quentin Gliech # Copyright 2021 The Matrix.org Foundation C.I.C. # @@ -16,7 +15,7 @@ import inspect import logging from typing import TYPE_CHECKING, Dict, Generic, List, Optional, TypeVar, Union -from urllib.parse import urlencode +from urllib.parse import urlencode, urlparse import attr import pymacaroons @@ -38,10 +37,7 @@ from twisted.web.client import readBody from twisted.web.http_headers import Headers from synapse.config import ConfigError -from synapse.config.oidc_config import ( - OidcProviderClientSecretJwtKey, - OidcProviderConfig, -) +from synapse.config.oidc import OidcProviderClientSecretJwtKey, OidcProviderConfig from synapse.handlers.sso import MappingException, UserAttributes from synapse.http.site import SynapseRequest from synapse.logging.context import make_deferred_yieldable @@ -72,8 +68,8 @@ logger = logging.getLogger(__name__) # # Here we have the names of the cookies, and the options we use to set them. _SESSION_COOKIES = [ - (b"oidc_session", b"Path=/_synapse/client/oidc; HttpOnly; Secure; SameSite=None"), - (b"oidc_session_no_samesite", b"Path=/_synapse/client/oidc; HttpOnly"), + (b"oidc_session", b"HttpOnly; Secure; SameSite=None"), + (b"oidc_session_no_samesite", b"HttpOnly"), ] #: A token exchanged from the token endpoint, as per RFC6749 sec 5.1. 
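The new `valid_id_server_location` helper lives in `synapse.util.stringutils` and is not shown in this hunk; purely to illustrate the kind of shape the error message describes (hostname with optional port and path, no scheme, query or fragment), a validator might look like the following. This is my own sketch, not the real function:

    from urllib.parse import urlparse

    def looks_like_id_server_location(id_server: str) -> bool:
        # Sketch only: accept "host", "host:port" and an optional path component,
        # reject anything carrying a scheme, query string or fragment.
        if "://" in id_server or "?" in id_server or "#" in id_server:
            return False
        parsed = urlparse("//" + id_server)
        if not parsed.hostname:
            return False
        try:
            parsed.port  # raises ValueError for a malformed port
        except ValueError:
            return False
        return True

    assert looks_like_id_server_location("matrix.example.org")
    assert looks_like_id_server_location("matrix.example.org:8090/identity")
    assert not looks_like_id_server_location("https://matrix.example.org")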
and @@ -283,6 +279,13 @@ class OidcProvider: self._config = provider self._callback_url = hs.config.oidc_callback_url # type: str + # Calculate the prefix for OIDC callback paths based on the public_baseurl. + # We'll insert this into the Path= parameter of any session cookies we set. + public_baseurl_path = urlparse(hs.config.server.public_baseurl).path + self._callback_path_prefix = ( + public_baseurl_path.encode("utf-8") + b"_synapse/client/oidc" + ) + self._oidc_attribute_requirements = provider.attribute_requirements self._scopes = provider.scopes self._user_profile_method = provider.user_profile_method @@ -783,8 +786,13 @@ class OidcProvider: for cookie_name, options in _SESSION_COOKIES: request.cookies.append( - b"%s=%s; Max-Age=3600; %s" - % (cookie_name, cookie.encode("utf-8"), options) + b"%s=%s; Max-Age=3600; Path=%s; %s" + % ( + cookie_name, + cookie.encode("utf-8"), + self._callback_path_prefix, + options, + ) ) metadata = await self.load_metadata() @@ -961,6 +969,11 @@ class OidcProvider: # and attempt to match it. attributes = await oidc_response_to_user_attributes(failures=0) + if attributes.localpart is None: + # If no localpart is returned then we will generate one, so + # there is no need to search for existing users. + return None + user_id = UserID(attributes.localpart, self._server_name).to_string() users = await self._store.get_users_by_id_case_insensitive(user_id) if users: diff --git a/synapse/handlers/pagination.py b/synapse/handlers/pagination.py index 66dc886c81..1e1186c29e 100644 --- a/synapse/handlers/pagination.py +++ b/synapse/handlers/pagination.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2014 - 2016 OpenMarket Ltd # Copyright 2017 - 2018 New Vector Ltd # diff --git a/synapse/handlers/password_policy.py b/synapse/handlers/password_policy.py index 92cefa11aa..cd21efdcc6 100644 --- a/synapse/handlers/password_policy.py +++ b/synapse/handlers/password_policy.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2019 New Vector Ltd # Copyright 2019 The Matrix.org Foundation C.I.C. # diff --git a/synapse/handlers/presence.py b/synapse/handlers/presence.py index 0047907cd9..12df35f26e 100644 --- a/synapse/handlers/presence.py +++ b/synapse/handlers/presence.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2014-2016 OpenMarket Ltd # Copyright 2020 The Matrix.org Foundation C.I.C. 
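The effect of the new `_callback_path_prefix` is easy to check in isolation: for a `public_baseurl` with a non-root path, the OIDC session cookies end up scoped under that prefix. The example values below are my own; the concatenation relies on the configured `public_baseurl` ending in a slash, as it does here:

    from urllib.parse import urlparse

    def callback_path_prefix(public_baseurl: str) -> bytes:
        # The path component of public_baseurl ends in a slash, so simple
        # concatenation yields the path the OIDC callback is served under.
        return urlparse(public_baseurl).path.encode("utf-8") + b"_synapse/client/oidc"

    assert callback_path_prefix("https://matrix.example.org/") == b"/_synapse/client/oidc"
    assert (
        callback_path_prefix("https://example.org/matrix/")
        == b"/matrix/_synapse/client/oidc"
    )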
# @@ -23,10 +22,13 @@ The methods that define policy are: - should_notify """ import abc +import contextlib import logging +from bisect import bisect from contextlib import contextmanager from typing import ( TYPE_CHECKING, + Collection, Dict, FrozenSet, Iterable, @@ -49,9 +51,15 @@ from synapse.logging.context import run_in_background from synapse.logging.utils import log_function from synapse.metrics import LaterGauge from synapse.metrics.background_process_metrics import run_as_background_process -from synapse.state import StateHandler +from synapse.replication.http.presence import ( + ReplicationBumpPresenceActiveTime, + ReplicationPresenceSetState, +) +from synapse.replication.http.streams import ReplicationGetStreamUpdates +from synapse.replication.tcp.commands import ClearUserSyncsCommand +from synapse.replication.tcp.streams import PresenceFederationStream, PresenceStream from synapse.storage.databases.main import DataStore -from synapse.types import Collection, JsonDict, UserID, get_domain_from_id +from synapse.types import JsonDict, UserID, get_domain_from_id from synapse.util.async_helpers import Linearizer from synapse.util.caches.descriptors import _CacheContext, cached from synapse.util.metrics import Measure @@ -105,15 +113,29 @@ FEDERATION_PING_INTERVAL = 25 * 60 * 1000 # are dead. EXTERNAL_PROCESS_EXPIRY = 5 * 60 * 1000 +# Delay before a worker tells the presence handler that a user has stopped +# syncing. +UPDATE_SYNCING_USERS_MS = 10 * 1000 + assert LAST_ACTIVE_GRANULARITY < IDLE_TIMER class BasePresenceHandler(abc.ABC): - """Parts of the PresenceHandler that are shared between workers and master""" + """Parts of the PresenceHandler that are shared between workers and presence + writer""" def __init__(self, hs: "HomeServer"): self.clock = hs.get_clock() self.store = hs.get_datastore() + self.presence_router = hs.get_presence_router() + self.state = hs.get_state_handler() + self.is_mine_id = hs.is_mine_id + + self._federation = None + if hs.should_send_federation(): + self._federation = hs.get_federation_sender() + + self._federation_queue = PresenceFederationQueue(hs, self) self._busy_presence_enabled = hs.config.experimental.msc3026_enabled @@ -209,18 +231,305 @@ class BasePresenceHandler(abc.ABC): with the app. """ + async def update_external_syncs_row( + self, process_id, user_id, is_syncing, sync_time_msec + ): + """Update the syncing users for an external process as a delta. + + This is a no-op when presence is handled by a different worker. + + Args: + process_id (str): An identifier for the process the users are + syncing against. This allows synapse to process updates + as user start and stop syncing against a given process. + user_id (str): The user who has started or stopped syncing + is_syncing (bool): Whether or not the user is now syncing + sync_time_msec(int): Time in ms when the user was last syncing + """ + pass + + async def update_external_syncs_clear(self, process_id): + """Marks all users that had been marked as syncing by a given process + as offline. + + Used when the process has stopped/disappeared. + + This is a no-op when presence is handled by a different worker. 
+ """ + pass + + async def process_replication_rows( + self, stream_name: str, instance_name: str, token: int, rows: list + ): + """Process streams received over replication.""" + await self._federation_queue.process_replication_rows( + stream_name, instance_name, token, rows + ) + + def get_federation_queue(self) -> "PresenceFederationQueue": + """Get the presence federation queue.""" + return self._federation_queue + + async def maybe_send_presence_to_interested_destinations( + self, states: List[UserPresenceState] + ): + """If this instance is a federation sender, send the states to all + destinations that are interested. Filters out any states for remote + users. + """ + + if not self._federation: + return + + states = [s for s in states if self.is_mine_id(s.user_id)] + + if not states: + return + + hosts_and_states = await get_interested_remotes( + self.store, + self.presence_router, + states, + ) + + for destinations, states in hosts_and_states: + self._federation.send_presence_to_destinations(states, destinations) + + +class _NullContextManager(ContextManager[None]): + """A context manager which does nothing.""" + + def __exit__(self, exc_type, exc_val, exc_tb): + pass + + +class WorkerPresenceHandler(BasePresenceHandler): + def __init__(self, hs): + super().__init__(hs) + self.hs = hs + + self._presence_writer_instance = hs.config.worker.writers.presence[0] + + self._presence_enabled = hs.config.use_presence + + # Route presence EDUs to the right worker + hs.get_federation_registry().register_instances_for_edu( + "m.presence", + hs.config.worker.writers.presence, + ) + + # The number of ongoing syncs on this process, by user id. + # Empty if _presence_enabled is false. + self._user_to_num_current_syncs = {} # type: Dict[str, int] + + self.notifier = hs.get_notifier() + self.instance_id = hs.get_instance_id() + + # user_id -> last_sync_ms. Lists the users that have stopped syncing but + # we haven't notified the presence writer of that yet + self.users_going_offline = {} + + self._bump_active_client = ReplicationBumpPresenceActiveTime.make_client(hs) + self._set_state_client = ReplicationPresenceSetState.make_client(hs) + + self._send_stop_syncing_loop = self.clock.looping_call( + self.send_stop_syncing, UPDATE_SYNCING_USERS_MS + ) + + self._busy_presence_enabled = hs.config.experimental.msc3026_enabled + + hs.get_reactor().addSystemEventTrigger( + "before", + "shutdown", + run_as_background_process, + "generic_presence.on_shutdown", + self._on_shutdown, + ) + + def _on_shutdown(self): + if self._presence_enabled: + self.hs.get_tcp_replication().send_command( + ClearUserSyncsCommand(self.instance_id) + ) + + def send_user_sync(self, user_id, is_syncing, last_sync_ms): + if self._presence_enabled: + self.hs.get_tcp_replication().send_user_sync( + self.instance_id, user_id, is_syncing, last_sync_ms + ) + + def mark_as_coming_online(self, user_id): + """A user has started syncing. Send a UserSync to the presence writer, + unless they had recently stopped syncing. + + Args: + user_id (str) + """ + going_offline = self.users_going_offline.pop(user_id, None) + if not going_offline: + # Safe to skip because we haven't yet told the presence writer they + # were offline + self.send_user_sync(user_id, True, self.clock.time_msec()) + + def mark_as_going_offline(self, user_id): + """A user has stopped syncing. We wait before notifying the presence + writer as its likely they'll come back soon. 
This allows us to avoid + sending a stopped syncing immediately followed by a started syncing + notification to the presence writer + + Args: + user_id (str) + """ + self.users_going_offline[user_id] = self.clock.time_msec() + + def send_stop_syncing(self): + """Check if there are any users who have stopped syncing a while ago and + haven't come back yet. If there are poke the presence writer about them. + """ + now = self.clock.time_msec() + for user_id, last_sync_ms in list(self.users_going_offline.items()): + if now - last_sync_ms > UPDATE_SYNCING_USERS_MS: + self.users_going_offline.pop(user_id, None) + self.send_user_sync(user_id, False, last_sync_ms) + + async def user_syncing( + self, user_id: str, affect_presence: bool + ) -> ContextManager[None]: + """Record that a user is syncing. + + Called by the sync and events servlets to record that a user has connected to + this worker and is waiting for some events. + """ + if not affect_presence or not self._presence_enabled: + return _NullContextManager() + + curr_sync = self._user_to_num_current_syncs.get(user_id, 0) + self._user_to_num_current_syncs[user_id] = curr_sync + 1 + + # If we went from no in flight sync to some, notify replication + if self._user_to_num_current_syncs[user_id] == 1: + self.mark_as_coming_online(user_id) + + def _end(): + # We check that the user_id is in user_to_num_current_syncs because + # user_to_num_current_syncs may have been cleared if we are + # shutting down. + if user_id in self._user_to_num_current_syncs: + self._user_to_num_current_syncs[user_id] -= 1 + + # If we went from one in flight sync to non, notify replication + if self._user_to_num_current_syncs[user_id] == 0: + self.mark_as_going_offline(user_id) + + @contextlib.contextmanager + def _user_syncing(): + try: + yield + finally: + _end() + + return _user_syncing() + + async def notify_from_replication(self, states, stream_id): + parties = await get_interested_parties(self.store, self.presence_router, states) + room_ids_to_states, users_to_states = parties + + self.notifier.on_new_event( + "presence_key", + stream_id, + rooms=room_ids_to_states.keys(), + users=users_to_states.keys(), + ) + + # If this is a federation sender, notify about presence updates. 
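The worker-side debounce above (`mark_as_going_offline` plus the `send_stop_syncing` loop) can be sketched with a fake clock to show why a quick reconnect never reaches the presence writer. This is a toy model of the bookkeeping, not Synapse code:

    UPDATE_SYNCING_USERS_MS = 10 * 1000

    class Debounce:
        """Toy version of the worker's going-offline bookkeeping."""

        def __init__(self):
            self.now = 1_000
            self.users_going_offline = {}
            self.sent = []  # (user_id, is_syncing) pairs "sent" to the presence writer

        def mark_as_going_offline(self, user_id):
            self.users_going_offline[user_id] = self.now

        def mark_as_coming_online(self, user_id):
            going_offline = self.users_going_offline.pop(user_id, None)
            if not going_offline:
                # The writer already thinks they are offline, so tell it otherwise.
                self.sent.append((user_id, True))

        def send_stop_syncing(self):
            for user_id, last_sync_ms in list(self.users_going_offline.items()):
                if self.now - last_sync_ms > UPDATE_SYNCING_USERS_MS:
                    self.users_going_offline.pop(user_id, None)
                    self.sent.append((user_id, False))

    d = Debounce()
    d.mark_as_going_offline("@alice:example.org")
    d.now += 2_000
    d.mark_as_coming_online("@alice:example.org")  # reconnected quickly: nothing sent
    d.now += 20_000
    d.send_stop_syncing()
    assert d.sent == []

    d.mark_as_going_offline("@bob:example.org")    # stays away long enough
    d.now += 20_000
    d.send_stop_syncing()
    assert d.sent == [("@bob:example.org", False)]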
+ await self.maybe_send_presence_to_interested_destinations(states) + + async def process_replication_rows( + self, stream_name: str, instance_name: str, token: int, rows: list + ): + await super().process_replication_rows(stream_name, instance_name, token, rows) + + if stream_name != PresenceStream.NAME: + return + + states = [ + UserPresenceState( + row.user_id, + row.state, + row.last_active_ts, + row.last_federation_update_ts, + row.last_user_sync_ts, + row.status_msg, + row.currently_active, + ) + for row in rows + ] + + for state in states: + self.user_to_current_state[state.user_id] = state + + stream_id = token + await self.notify_from_replication(states, stream_id) + + def get_currently_syncing_users_for_replication(self) -> Iterable[str]: + return [ + user_id + for user_id, count in self._user_to_num_current_syncs.items() + if count > 0 + ] + + async def set_state(self, target_user, state, ignore_status_msg=False): + """Set the presence state of the user.""" + presence = state["presence"] + + valid_presence = ( + PresenceState.ONLINE, + PresenceState.UNAVAILABLE, + PresenceState.OFFLINE, + PresenceState.BUSY, + ) + + if presence not in valid_presence or ( + presence == PresenceState.BUSY and not self._busy_presence_enabled + ): + raise SynapseError(400, "Invalid presence state") + + user_id = target_user.to_string() + + # If presence is disabled, no-op + if not self.hs.config.use_presence: + return + + # Proxy request to instance that writes presence + await self._set_state_client( + instance_name=self._presence_writer_instance, + user_id=user_id, + state=state, + ignore_status_msg=ignore_status_msg, + ) + + async def bump_presence_active_time(self, user): + """We've seen the user do something that indicates they're interacting + with the app. + """ + # If presence is disabled, no-op + if not self.hs.config.use_presence: + return + + # Proxy request to instance that writes presence + user_id = user.to_string() + await self._bump_active_client( + instance_name=self._presence_writer_instance, user_id=user_id + ) + class PresenceHandler(BasePresenceHandler): def __init__(self, hs: "HomeServer"): super().__init__(hs) self.hs = hs - self.is_mine_id = hs.is_mine_id self.server_name = hs.hostname self.wheel_timer = WheelTimer() self.notifier = hs.get_notifier() - self.federation = hs.get_federation_sender() - self.state = hs.get_state_handler() - self.presence_router = hs.get_presence_router() self._presence_enabled = hs.config.use_presence federation_registry = hs.get_federation_registry() @@ -427,6 +736,13 @@ class PresenceHandler(BasePresenceHandler): self.unpersisted_users_changes |= {s.user_id for s in new_states} self.unpersisted_users_changes -= set(to_notify.keys()) + # Check if we need to resend any presence states to remote hosts. We + # only do this for states that haven't been updated in a while to + # ensure that the remote host doesn't time the presence state out. + # + # Note that since these are states that have *not* been updated, + # they won't get sent down the normal presence replication stream, + # and so we have to explicitly send them via the federation stream. 
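The worker's `set_state` proxy above rejects unknown states up front and only accepts `busy` when the experimental MSC3026 flag is enabled. The acceptance rule on its own, with the literal presence strings standing in for the `PresenceState` constants:

    VALID_PRESENCE = ("online", "unavailable", "offline", "busy")

    def is_acceptable(presence: str, busy_presence_enabled: bool) -> bool:
        # Mirrors the guard in set_state: reject anything unknown, and reject
        # "busy" unless MSC3026 support is turned on.
        if presence not in VALID_PRESENCE:
            return False
        if presence == "busy" and not busy_presence_enabled:
            return False
        return True

    assert is_acceptable("online", busy_presence_enabled=False)
    assert not is_acceptable("busy", busy_presence_enabled=False)
    assert is_acceptable("busy", busy_presence_enabled=True)
    assert not is_acceptable("sleeping", busy_presence_enabled=True)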
to_federation_ping = { user_id: state for user_id, state in to_federation_ping.items() @@ -435,7 +751,16 @@ class PresenceHandler(BasePresenceHandler): if to_federation_ping: federation_presence_out_counter.inc(len(to_federation_ping)) - self._push_to_remotes(to_federation_ping.values()) + hosts_and_states = await get_interested_remotes( + self.store, + self.presence_router, + list(to_federation_ping.values()), + ) + + for destinations, states in hosts_and_states: + self._federation_queue.send_presence_to_destinations( + states, destinations + ) async def _handle_timeouts(self): """Checks the presence of users that have timed out and updates as @@ -675,15 +1000,10 @@ class PresenceHandler(BasePresenceHandler): users=[UserID.from_string(u) for u in users_to_states], ) - self._push_to_remotes(states) - - def _push_to_remotes(self, states): - """Sends state updates to remote servers. - - Args: - states (list(UserPresenceState)) - """ - self.federation.send_presence(states) + # We only want to poke the local federation sender, if any, as other + # workers will receive the presence updates via the presence replication + # stream (which is updated by `store.update_presence`). + await self.maybe_send_presence_to_interested_destinations(states) async def incoming_presence(self, origin, content): """Called when we receive a `m.presence` EDU from a remote server.""" @@ -921,7 +1241,7 @@ class PresenceHandler(BasePresenceHandler): # Send out user presence updates for each destination for destination, user_state_set in presence_destinations.items(): - self.federation.send_presence_to_destinations( + self._federation_queue.send_presence_to_destinations( destinations=[destination], states=user_state_set ) @@ -1061,7 +1381,6 @@ class PresenceEventSource: self.get_presence_router = hs.get_presence_router self.clock = hs.get_clock() self.store = hs.get_datastore() - self.state = hs.get_state_handler() @log_function async def get_new_events( @@ -1530,7 +1849,6 @@ async def get_interested_remotes( store: DataStore, presence_router: PresenceRouter, states: List[UserPresenceState], - state_handler: StateHandler, ) -> List[Tuple[Collection[str], List[UserPresenceState]]]: """Given a list of presence states figure out which remote servers should be sent which. @@ -1541,7 +1859,6 @@ async def get_interested_remotes( store: The homeserver's data store. presence_router: A module for augmenting the destinations for presence updates. states: A list of incoming user presence updates. - state_handler: Returns: A list of 2-tuples of destinations and states, where for @@ -1558,7 +1875,8 @@ async def get_interested_remotes( ) for room_id, states in room_ids_to_states.items(): - hosts = await state_handler.get_current_hosts_in_room(room_id) + user_ids = await store.get_users_in_room(room_id) + hosts = {get_domain_from_id(user_id) for user_id in user_ids} hosts_and_states.append((hosts, states)) for user_id, states in users_to_states.items(): @@ -1566,3 +1884,220 @@ async def get_interested_remotes( hosts_and_states.append(([host], states)) return hosts_and_states + + +class PresenceFederationQueue: + """Handles sending ad hoc presence updates over federation, which are *not* + due to state updates (that get handled via the presence stream), e.g. + federation pings and sending existing present states to newly joined hosts. + + Only the last N minutes will be queued, so if a federation sender instance + is down for longer then some updates will be dropped. 
This is OK as presence + is ephemeral, and so it will self correct eventually. + + On workers the class tracks the last received position of the stream from + replication, and handles querying for missed updates over HTTP replication, + c.f. `get_current_token` and `get_replication_rows`. + """ + + # How long to keep entries in the queue for. Workers that are down for + # longer than this duration will miss out on older updates. + _KEEP_ITEMS_IN_QUEUE_FOR_MS = 5 * 60 * 1000 + + # How often to check if we can expire entries from the queue. + _CLEAR_ITEMS_EVERY_MS = 60 * 1000 + + def __init__(self, hs: "HomeServer", presence_handler: BasePresenceHandler): + self._clock = hs.get_clock() + self._notifier = hs.get_notifier() + self._instance_name = hs.get_instance_name() + self._presence_handler = presence_handler + self._repl_client = ReplicationGetStreamUpdates.make_client(hs) + + # Should we keep a queue of recent presence updates? We only bother if + # another process may be handling federation sending. + self._queue_presence_updates = True + + # Whether this instance is a presence writer. + self._presence_writer = self._instance_name in hs.config.worker.writers.presence + + # The FederationSender instance, if this process sends federation traffic directly. + self._federation = None + + if hs.should_send_federation(): + self._federation = hs.get_federation_sender() + + # We don't bother queuing up presence states if only this instance + # is sending federation. + if hs.config.worker.federation_shard_config.instances == [ + self._instance_name + ]: + self._queue_presence_updates = False + + # The queue of recently queued updates as tuples of: `(timestamp, + # stream_id, destinations, user_ids)`. We don't store the full states + # for efficiency, and remote workers will already have the full states + # cached. + self._queue = [] # type: List[Tuple[int, int, Collection[str], Set[str]]] + + self._next_id = 1 + + # Map from instance name to current token + self._current_tokens = {} # type: Dict[str, int] + + if self._queue_presence_updates: + self._clock.looping_call(self._clear_queue, self._CLEAR_ITEMS_EVERY_MS) + + def _clear_queue(self): + """Clear out older entries from the queue.""" + clear_before = self._clock.time_msec() - self._KEEP_ITEMS_IN_QUEUE_FOR_MS + + # The queue is sorted by timestamp, so we can bisect to find the right + # place to purge before. Note that we are searching using a 1-tuple with + # the time, which does The Right Thing since the queue is a tuple where + # the first item is a timestamp. + index = bisect(self._queue, (clear_before,)) + self._queue = self._queue[index:] + + def send_presence_to_destinations( + self, states: Collection[UserPresenceState], destinations: Collection[str] + ) -> None: + """Send the presence states to the given destinations. + + Will forward to the local federation sender (if there is one) and queue + to send over replication (if there are other federation sender instances.). + + Must only be called on the presence writer process. + """ + + # This should only be called on a presence writer. 
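`_clear_queue` relies on the queue being sorted by timestamp and on Python's tuple comparison: bisecting with a 1-tuple `(clear_before,)` finds the first entry at or after the cutoff. The same trick in isolation, with made-up queue entries:

    from bisect import bisect

    # (timestamp_ms, stream_id, destinations, user_ids) entries, oldest first.
    queue = [
        (1_000, 1, ("remote1",), {"@a:example.org"}),
        (2_000, 2, ("remote2",), {"@b:example.org"}),
        (9_000, 3, ("remote3",), {"@c:example.org"}),
    ]

    clear_before = 5_000
    # A 1-tuple compares less than any full entry sharing its timestamp, so the
    # bisection point lands just before the first entry we want to keep.
    index = bisect(queue, (clear_before,))
    queue = queue[index:]

    assert [stream_id for _, stream_id, _, _ in queue] == [3]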
+ assert self._presence_writer + + if self._federation: + self._federation.send_presence_to_destinations( + states=states, + destinations=destinations, + ) + + if not self._queue_presence_updates: + return + + now = self._clock.time_msec() + + stream_id = self._next_id + self._next_id += 1 + + self._queue.append((now, stream_id, destinations, {s.user_id for s in states})) + + self._notifier.notify_replication() + + def get_current_token(self, instance_name: str) -> int: + """Get the current position of the stream. + + On workers this returns the last stream ID received from replication. + """ + if instance_name == self._instance_name: + return self._next_id - 1 + else: + return self._current_tokens.get(instance_name, 0) + + async def get_replication_rows( + self, + instance_name: str, + from_token: int, + upto_token: int, + target_row_count: int, + ) -> Tuple[List[Tuple[int, Tuple[str, str]]], int, bool]: + """Get all the updates between the two tokens. + + We return rows in the form of `(destination, user_id)` to keep the size + of each row bounded (rather than returning the sets in a row). + + On workers this will query the presence writer process via HTTP replication. + """ + if instance_name != self._instance_name: + # If not local we query over http replication from the presence + # writer + result = await self._repl_client( + instance_name=instance_name, + stream_name=PresenceFederationStream.NAME, + from_token=from_token, + upto_token=upto_token, + ) + return result["updates"], result["upto_token"], result["limited"] + + # If the from_token is the current token then there's nothing to return + # and we can trivially no-op. + if from_token == self._next_id - 1: + return [], upto_token, False + + # We can find the correct position in the queue by noting that there is + # exactly one entry per stream ID, and that the last entry has an ID of + # `self._next_id - 1`, so we can count backwards from the end. + # + # Since we are returning all states in the range `from_token < stream_id + # <= upto_token` we look for the index with a `stream_id` of `from_token + # + 1`. + # + # Since the start of the queue is periodically truncated we need to + # handle the case where `from_token` stream ID has already been dropped. + start_idx = max(from_token + 1 - self._next_id, -len(self._queue)) + + to_send = [] # type: List[Tuple[int, Tuple[str, str]]] + limited = False + new_id = upto_token + for _, stream_id, destinations, user_ids in self._queue[start_idx:]: + if stream_id <= from_token: + # Paranoia check that we are actually only sending states that + # are have stream_id strictly greater than from_token. We should + # never hit this. 
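The index arithmetic in `get_replication_rows` depends on there being exactly one queue entry per stream ID, with the last entry holding ID `next_id - 1`; counting back from the end locates `from_token + 1` even after older entries have been truncated. A small standalone check of that arithmetic (queue contents are my own):

    # Queue entries keyed by consecutive stream IDs; the head has been truncated,
    # so IDs 1-2 are gone and the next ID to be allocated is 6.
    queue_stream_ids = [3, 4, 5]
    next_id = 6

    def rows_between(from_token: int, upto_token: int):
        # Negative index counted back from the end; clamp so a from_token that has
        # already been truncated still starts at the oldest entry we have.
        start_idx = max(from_token + 1 - next_id, -len(queue_stream_ids))
        return [
            stream_id
            for stream_id in queue_stream_ids[start_idx:]
            if from_token < stream_id <= upto_token
        ]

    assert rows_between(3, 5) == [4, 5]     # normal catch-up
    assert rows_between(0, 5) == [3, 4, 5]  # from_token already truncated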
+ logger.warning( + "Tried returning presence federation stream ID: %d less than from_token: %d (next_id: %d, len: %d)", + stream_id, + from_token, + self._next_id, + len(self._queue), + ) + continue + + if stream_id > upto_token: + break + + new_id = stream_id + + to_send.extend( + (stream_id, (destination, user_id)) + for destination in destinations + for user_id in user_ids + ) + + if len(to_send) > target_row_count: + limited = True + break + + return to_send, new_id, limited + + async def process_replication_rows( + self, stream_name: str, instance_name: str, token: int, rows: list + ): + if stream_name != PresenceFederationStream.NAME: + return + + # We keep track of the current tokens (so that we can catch up with anything we missed after a disconnect) + self._current_tokens[instance_name] = token + + # If we're a federation sender we pull out the presence states to send + # and forward them on. + if not self._federation: + return + + hosts_to_users = {} # type: Dict[str, Set[str]] + for row in rows: + hosts_to_users.setdefault(row.destination, set()).add(row.user_id) + + for host, user_ids in hosts_to_users.items(): + states = await self._presence_handler.current_state_for_users(user_ids) + self._federation.send_presence_to_destinations( + states=states.values(), + destinations=[host], + ) diff --git a/synapse/handlers/profile.py b/synapse/handlers/profile.py index ee8f868791..14a4786b7e 100644 --- a/synapse/handlers/profile.py +++ b/synapse/handlers/profile.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2014-2016 OpenMarket Ltd # Copyright 2018 New Vector Ltd # diff --git a/synapse/handlers/read_marker.py b/synapse/handlers/read_marker.py index a54fe1968e..c679a8303e 100644 --- a/synapse/handlers/read_marker.py +++ b/synapse/handlers/read_marker.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2017 Vector Creations Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/handlers/receipts.py b/synapse/handlers/receipts.py index dbfe9bfaca..f782d9db32 100644 --- a/synapse/handlers/receipts.py +++ b/synapse/handlers/receipts.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2015, 2016 OpenMarket Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/handlers/register.py b/synapse/handlers/register.py index 8d4a4612a3..3d4869067f 100644 --- a/synapse/handlers/register.py +++ b/synapse/handlers/register.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2014 - 2016 OpenMarket Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/handlers/room.py b/synapse/handlers/room.py index f3da38a71e..1a99a0c827 100644 --- a/synapse/handlers/room.py +++ b/synapse/handlers/room.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2014 - 2016 OpenMarket Ltd # Copyright 2018-2019 New Vector Ltd # Copyright 2019 The Matrix.org Foundation C.I.C. diff --git a/synapse/handlers/room_list.py b/synapse/handlers/room_list.py index 01fade718e..9986459c31 100644 --- a/synapse/handlers/room_list.py +++ b/synapse/handlers/room_list.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2014 - 2016 OpenMarket Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/handlers/room_member.py b/synapse/handlers/room_member.py index 89e95c8ae9..31c11280e8 100644 --- a/synapse/handlers/room_member.py +++ b/synapse/handlers/room_member.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2016-2020 The Matrix.org Foundation C.I.C. 
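The `get_interested_remotes` change a few hunks up swaps the state handler's `get_current_hosts_in_room` for a set comprehension over the room's user IDs; extracting the host is just taking the MXID's domain part. A simplified stand-in for `synapse.types.get_domain_from_id`:

    def get_domain_from_id(user_id: str) -> str:
        # An MXID is "@localpart:domain"; everything after the first colon is the host.
        return user_id.split(":", 1)[1]

    user_ids = ["@alice:example.org", "@bob:example.org", "@carol:matrix.org"]
    hosts = {get_domain_from_id(user_id) for user_id in user_ids}
    assert hosts == {"example.org", "matrix.org"}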
# Copyright 2020 Sorunome # @@ -20,7 +19,7 @@ from http import HTTPStatus from typing import TYPE_CHECKING, Iterable, List, Optional, Tuple from synapse import types -from synapse.api.constants import AccountDataTypes, EventTypes, JoinRules, Membership +from synapse.api.constants import AccountDataTypes, EventTypes, Membership from synapse.api.errors import ( AuthError, Codes, @@ -29,7 +28,6 @@ from synapse.api.errors import ( SynapseError, ) from synapse.api.ratelimiting import Ratelimiter -from synapse.api.room_versions import RoomVersion from synapse.events import EventBase from synapse.events.snapshot import EventContext from synapse.types import ( @@ -73,6 +71,7 @@ class RoomMemberHandler(metaclass=abc.ABCMeta): self.profile_handler = hs.get_profile_handler() self.event_creation_handler = hs.get_event_creation_handler() self.account_data_handler = hs.get_account_data_handler() + self.event_auth_handler = hs.get_event_auth_handler() self.member_linearizer = Linearizer(name="member") @@ -226,62 +225,6 @@ class RoomMemberHandler(metaclass=abc.ABCMeta): await self._invites_per_user_limiter.ratelimit(requester, invitee_user_id) - async def _can_join_without_invite( - self, state_ids: StateMap[str], room_version: RoomVersion, user_id: str - ) -> bool: - """ - Check whether a user can join a room without an invite. - - When joining a room with restricted joined rules (as defined in MSC3083), - the membership of spaces must be checked during join. - - Args: - state_ids: The state of the room as it currently is. - room_version: The room version of the room being joined. - user_id: The user joining the room. - - Returns: - True if the user can join the room, false otherwise. - """ - # This only applies to room versions which support the new join rule. - if not room_version.msc3083_join_rules: - return True - - # If there's no join rule, then it defaults to public (so this doesn't apply). - join_rules_event_id = state_ids.get((EventTypes.JoinRules, ""), None) - if not join_rules_event_id: - return True - - # If the join rule is not restricted, this doesn't apply. - join_rules_event = await self.store.get_event(join_rules_event_id) - if join_rules_event.content.get("join_rule") != JoinRules.MSC3083_RESTRICTED: - return True - - # If allowed is of the wrong form, then only allow invited users. - allowed_spaces = join_rules_event.content.get("allow", []) - if not isinstance(allowed_spaces, list): - return False - - # Get the list of joined rooms and see if there's an overlap. - joined_rooms = await self.store.get_rooms_for_user(user_id) - - # Pull out the other room IDs, invalid data gets filtered. - for space in allowed_spaces: - if not isinstance(space, dict): - continue - - space_id = space.get("space") - if not isinstance(space_id, str): - continue - - # The user was joined to one of the spaces specified, they can join - # this room! - if space_id in joined_rooms: - return True - - # The user was not in any of the required spaces. 
- return False - async def _local_membership_update( self, requester: Requester, @@ -350,7 +293,7 @@ class RoomMemberHandler(metaclass=abc.ABCMeta): if ( newly_joined and not user_is_invited - and not await self._can_join_without_invite( + and not await self.event_auth_handler.can_join_without_invite( prev_state_ids, event.room_version, user_id ) ): diff --git a/synapse/handlers/room_member_worker.py b/synapse/handlers/room_member_worker.py index 9f2bde25dd..3fdad8cdd8 100644 --- a/synapse/handlers/room_member_worker.py +++ b/synapse/handlers/room_member_worker.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2018 New Vector Ltd # Copyright 2020 The Matrix.org Foundation C.I.C. # diff --git a/synapse/handlers/saml_handler.py b/synapse/handlers/saml.py index ec2ba11c75..80ba65b9e0 100644 --- a/synapse/handlers/saml_handler.py +++ b/synapse/handlers/saml.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2019 The Matrix.org Foundation C.I.C. # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/handlers/search.py b/synapse/handlers/search.py index d742dfbd53..4e718d3f63 100644 --- a/synapse/handlers/search.py +++ b/synapse/handlers/search.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2015, 2016 OpenMarket Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/handlers/set_password.py b/synapse/handlers/set_password.py index 6243fab091..56207c7059 100644 --- a/synapse/handlers/set_password.py +++ b/synapse/handlers/set_password.py @@ -1,6 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2017-2018 New Vector Ltd -# Copyright 2019 The Matrix.org Foundation C.I.C. +# Copyright 2021 The Matrix.org Foundation C.I.C. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/synapse/handlers/space_summary.py b/synapse/handlers/space_summary.py index 5d9418969d..01e3e050f9 100644 --- a/synapse/handlers/space_summary.py +++ b/synapse/handlers/space_summary.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2021 The Matrix.org Foundation C.I.C. # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/handlers/sso.py b/synapse/handlers/sso.py index 415b1c2d17..044ff06d84 100644 --- a/synapse/handlers/sso.py +++ b/synapse/handlers/sso.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2020 The Matrix.org Foundation C.I.C. 
# # Licensed under the Apache License, Version 2.0 (the "License"); @@ -19,6 +18,7 @@ from typing import ( Any, Awaitable, Callable, + Collection, Dict, Iterable, List, @@ -41,7 +41,7 @@ from synapse.handlers.ui_auth import UIAuthSessionDataConstants from synapse.http import get_request_user_agent from synapse.http.server import respond_with_html, respond_with_redirect from synapse.http.site import SynapseRequest -from synapse.types import Collection, JsonDict, UserID, contains_invalid_mxid_characters +from synapse.types import JsonDict, UserID, contains_invalid_mxid_characters from synapse.util.async_helpers import Linearizer from synapse.util.stringutils import random_string diff --git a/synapse/handlers/state_deltas.py b/synapse/handlers/state_deltas.py index ee8f87e59a..077c7c0649 100644 --- a/synapse/handlers/state_deltas.py +++ b/synapse/handlers/state_deltas.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2018 New Vector Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/handlers/stats.py b/synapse/handlers/stats.py index 4227f8c1a2..761e956b6e 100644 --- a/synapse/handlers/stats.py +++ b/synapse/handlers/stats.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2018 New Vector Ltd # Copyright 2020 Sorunome # Copyright 2020 The Matrix.org Foundation C.I.C. diff --git a/synapse/handlers/sync.py b/synapse/handlers/sync.py index 6d6d6ed0df..a1b0aee355 100644 --- a/synapse/handlers/sync.py +++ b/synapse/handlers/sync.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2015, 2016 OpenMarket Ltd # Copyright 2018, 2019 New Vector Ltd # @@ -15,7 +14,17 @@ # limitations under the License. import itertools import logging -from typing import TYPE_CHECKING, Any, Dict, FrozenSet, List, Optional, Set, Tuple +from typing import ( + TYPE_CHECKING, + Any, + Collection, + Dict, + FrozenSet, + List, + Optional, + Set, + Tuple, +) import attr from prometheus_client import Counter @@ -29,7 +38,6 @@ from synapse.push.clientformat import format_push_rules_for_user from synapse.storage.roommember import MemberSummary from synapse.storage.state import StateFilter from synapse.types import ( - Collection, JsonDict, MutableStateMap, Requester, diff --git a/synapse/handlers/typing.py b/synapse/handlers/typing.py index bb35af099d..e22393adc4 100644 --- a/synapse/handlers/typing.py +++ b/synapse/handlers/typing.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2014-2016 OpenMarket Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/handlers/ui_auth/__init__.py b/synapse/handlers/ui_auth/__init__.py index a68d5e790e..4c3b669fae 100644 --- a/synapse/handlers/ui_auth/__init__.py +++ b/synapse/handlers/ui_auth/__init__.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2019 The Matrix.org Foundation C.I.C. # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/handlers/ui_auth/checkers.py b/synapse/handlers/ui_auth/checkers.py index 3d66bf305e..0eeb7c03f2 100644 --- a/synapse/handlers/ui_auth/checkers.py +++ b/synapse/handlers/ui_auth/checkers.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2019 The Matrix.org Foundation C.I.C. 
# # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/handlers/user_directory.py b/synapse/handlers/user_directory.py index b121286d95..dacc4f3076 100644 --- a/synapse/handlers/user_directory.py +++ b/synapse/handlers/user_directory.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2017 Vector Creations Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -45,7 +44,6 @@ class UserDirectoryHandler(StateDeltasHandler): super().__init__(hs) self.store = hs.get_datastore() - self.state = hs.get_state_handler() self.server_name = hs.hostname self.clock = hs.get_clock() self.notifier = hs.get_notifier() @@ -303,10 +301,12 @@ class UserDirectoryHandler(StateDeltasHandler): # ignore the change return - users_with_profile = await self.state.get_current_users_in_room(room_id) + other_users_in_room_with_profiles = ( + await self.store.get_users_in_room_with_profiles(room_id) + ) # Remove every user from the sharing tables for that room. - for user_id in users_with_profile.keys(): + for user_id in other_users_in_room_with_profiles.keys(): await self.store.remove_user_who_share_room(user_id, room_id) # Then, re-add them to the tables. @@ -315,7 +315,7 @@ class UserDirectoryHandler(StateDeltasHandler): # which when ran over an entire room, will result in the same values # being added multiple times. The batching upserts shouldn't make this # too bad, though. - for user_id, profile in users_with_profile.items(): + for user_id, profile in other_users_in_room_with_profiles.items(): await self._handle_new_user(room_id, user_id, profile) async def _handle_new_user( @@ -337,7 +337,7 @@ class UserDirectoryHandler(StateDeltasHandler): room_id ) # Now we update users who share rooms with users. - users_with_profile = await self.state.get_current_users_in_room(room_id) + other_users_in_room = await self.store.get_users_in_room(room_id) if is_public: await self.store.add_users_in_public_rooms(room_id, (user_id,)) @@ -353,14 +353,14 @@ class UserDirectoryHandler(StateDeltasHandler): # We don't care about appservice users. 
if not is_appservice: - for other_user_id in users_with_profile: + for other_user_id in other_users_in_room: if user_id == other_user_id: continue to_insert.add((user_id, other_user_id)) # Next we need to update for every local user in the room - for other_user_id in users_with_profile: + for other_user_id in other_users_in_room: if user_id == other_user_id: continue diff --git a/synapse/http/__init__.py b/synapse/http/__init__.py index 142b007d01..ed4671b7de 100644 --- a/synapse/http/__init__.py +++ b/synapse/http/__init__.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2014-2016 OpenMarket Ltd # Copyright 2018 New Vector Ltd # diff --git a/synapse/http/additional_resource.py b/synapse/http/additional_resource.py index 479746c9c5..55ea97a07f 100644 --- a/synapse/http/additional_resource.py +++ b/synapse/http/additional_resource.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2017 New Vector Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/http/client.py b/synapse/http/client.py index f7a07f0466..5f40f16e24 100644 --- a/synapse/http/client.py +++ b/synapse/http/client.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2014-2016 OpenMarket Ltd # Copyright 2018 New Vector Ltd # @@ -34,6 +33,7 @@ import treq from canonicaljson import encode_canonical_json from netaddr import AddrFormatError, IPAddress, IPSet from prometheus_client import Counter +from typing_extensions import Protocol from zope.interface import implementer, provider from OpenSSL import SSL @@ -755,6 +755,16 @@ def _timeout_to_request_timed_out_error(f: Failure): return f +class ByteWriteable(Protocol): + """The type of object which must be passed into read_body_with_max_size. + + Typically this is a file object. + """ + + def write(self, data: bytes) -> int: + pass + + class BodyExceededMaxSize(Exception): """The maximum allowed size of the HTTP body was exceeded.""" @@ -791,7 +801,7 @@ class _ReadBodyWithMaxSizeProtocol(protocol.Protocol): transport = None # type: Optional[ITCPTransport] def __init__( - self, stream: BinaryIO, deferred: defer.Deferred, max_size: Optional[int] + self, stream: ByteWriteable, deferred: defer.Deferred, max_size: Optional[int] ): self.stream = stream self.deferred = deferred @@ -831,7 +841,7 @@ class _ReadBodyWithMaxSizeProtocol(protocol.Protocol): def read_body_with_max_size( - response: IResponse, stream: BinaryIO, max_size: Optional[int] + response: IResponse, stream: ByteWriteable, max_size: Optional[int] ) -> defer.Deferred: """ Read a HTTP response body to a file-object. Optionally enforcing a maximum file size. diff --git a/synapse/http/connectproxyclient.py b/synapse/http/connectproxyclient.py index b797e3ce80..17e1c5abb1 100644 --- a/synapse/http/connectproxyclient.py +++ b/synapse/http/connectproxyclient.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2019 The Matrix.org Foundation C.I.C. 
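The new `ByteWriteable` protocol in `synapse/http/client.py` is purely structural: anything exposing `write(bytes) -> int` qualifies, which is what lets both plain file objects and the federation client's new `BinaryIOWrapper` be handed to `read_body_with_max_size`. A small check of the idea, under the assumption of Python 3.8+ for `typing.Protocol` (`typing_extensions` otherwise):

    from io import BytesIO
    from typing import Protocol

    class ByteWriteable(Protocol):
        def write(self, data: bytes) -> int:
            ...

    def dump(dest: ByteWriteable, chunks) -> int:
        # Any object with a compatible write() is accepted; no inheritance needed.
        written = 0
        for chunk in chunks:
            written += dest.write(chunk)
        return written

    buf = BytesIO()
    assert dump(buf, [b"abc", b"de"]) == 5
    assert buf.getvalue() == b"abcde"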
# # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/http/federation/__init__.py b/synapse/http/federation/__init__.py index 1453d04571..743fb9904a 100644 --- a/synapse/http/federation/__init__.py +++ b/synapse/http/federation/__init__.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2019 New Vector Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/http/federation/matrix_federation_agent.py b/synapse/http/federation/matrix_federation_agent.py index 5935a125fd..950770201a 100644 --- a/synapse/http/federation/matrix_federation_agent.py +++ b/synapse/http/federation/matrix_federation_agent.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2019 New Vector Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/http/federation/srv_resolver.py b/synapse/http/federation/srv_resolver.py index d9620032d2..b8ed4ec905 100644 --- a/synapse/http/federation/srv_resolver.py +++ b/synapse/http/federation/srv_resolver.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2014-2016 OpenMarket Ltd # Copyright 2019 New Vector Ltd # diff --git a/synapse/http/federation/well_known_resolver.py b/synapse/http/federation/well_known_resolver.py index ce4079f15c..20d39a4ea6 100644 --- a/synapse/http/federation/well_known_resolver.py +++ b/synapse/http/federation/well_known_resolver.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2019 The Matrix.org Foundation C.I.C. # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/http/matrixfederationclient.py b/synapse/http/matrixfederationclient.py index ab47dec8f2..bb837b7b19 100644 --- a/synapse/http/matrixfederationclient.py +++ b/synapse/http/matrixfederationclient.py @@ -1,6 +1,4 @@ -# -*- coding: utf-8 -*- -# Copyright 2014-2016 OpenMarket Ltd -# Copyright 2018 New Vector Ltd +# Copyright 2014-2021 The Matrix.org Foundation C.I.C. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -14,11 +12,13 @@ # See the License for the specific language governing permissions and # limitations under the License. import cgi +import codecs import logging import random import sys +import typing import urllib.parse -from io import BytesIO +from io import BytesIO, StringIO from typing import Callable, Dict, List, Optional, Tuple, Union import attr @@ -73,6 +73,9 @@ incoming_responses_counter = Counter( "synapse_http_matrixfederationclient_responses", "", ["method", "code"] ) +# a federation response can be rather large (eg a big state_ids is 50M or so), so we +# need a generous limit here. +MAX_RESPONSE_SIZE = 100 * 1024 * 1024 MAX_LONG_RETRIES = 10 MAX_SHORT_RETRIES = 3 @@ -168,12 +171,27 @@ async def _handle_json_response( try: check_content_type_is_json(response.headers) - # Use the custom JSON decoder (partially re-implements treq.json_content). - d = treq.text_content(response, encoding="utf-8") - d.addCallback(json_decoder.decode) + buf = StringIO() + d = read_body_with_max_size(response, BinaryIOWrapper(buf), MAX_RESPONSE_SIZE) d = timeout_deferred(d, timeout=timeout_sec, reactor=reactor) + def parse(_len: int): + return json_decoder.decode(buf.getvalue()) + + d.addCallback(parse) + body = await make_deferred_yieldable(d) + except BodyExceededMaxSize as e: + # The response was too big. 
+ logger.warning( + "{%s} [%s] JSON response exceeded max size %i - %s %s", + request.txn_id, + request.destination, + MAX_RESPONSE_SIZE, + request.method, + request.uri.decode("ascii"), + ) + raise RequestSendFailed(e, can_retry=False) from e except ValueError as e: # The JSON content was invalid. logger.warning( @@ -219,6 +237,18 @@ async def _handle_json_response( return body +class BinaryIOWrapper: + """A wrapper for a TextIO which converts from bytes on the fly.""" + + def __init__(self, file: typing.TextIO, encoding="utf-8", errors="strict"): + self.decoder = codecs.getincrementaldecoder(encoding)(errors) + self.file = file + + def write(self, b: Union[bytes, bytearray]) -> int: + self.file.write(self.decoder.decode(b)) + return len(b) + + class MatrixFederationHttpClient: """HTTP client used to talk to other homeservers over the federation protocol. Send client certificates and signs requests. diff --git a/synapse/http/proxyagent.py b/synapse/http/proxyagent.py index ea5ad14cb0..7dfae8b786 100644 --- a/synapse/http/proxyagent.py +++ b/synapse/http/proxyagent.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2019 The Matrix.org Foundation C.I.C. # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/http/request_metrics.py b/synapse/http/request_metrics.py index 0ec5d941b8..602f93c497 100644 --- a/synapse/http/request_metrics.py +++ b/synapse/http/request_metrics.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2014-2016 OpenMarket Ltd # Copyright 2018 New Vector Ltd # diff --git a/synapse/http/server.py b/synapse/http/server.py index fa89260850..845651e606 100644 --- a/synapse/http/server.py +++ b/synapse/http/server.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2014-2016 OpenMarket Ltd # Copyright 2018 New Vector Ltd # diff --git a/synapse/http/servlet.py b/synapse/http/servlet.py index 839d58d0d4..fb5794e80b 100644 --- a/synapse/http/servlet.py +++ b/synapse/http/servlet.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2014-2016 OpenMarket Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/http/site.py b/synapse/http/site.py index 32b5e19c09..671fd3fbcc 100644 --- a/synapse/http/site.py +++ b/synapse/http/site.py @@ -14,13 +14,14 @@ import contextlib import logging import time -from typing import Optional, Tuple, Type, Union +from typing import Optional, Tuple, Union import attr from zope.interface import implementer -from twisted.internet.interfaces import IAddress +from twisted.internet.interfaces import IAddress, IReactorTime from twisted.python.failure import Failure +from twisted.web.resource import IResource from twisted.web.server import Request, Site from synapse.config.server import ListenerConfig @@ -49,6 +50,7 @@ class SynapseRequest(Request): * Redaction of access_token query-params in __repr__ * Logging at start and end * Metrics to record CPU, wallclock and DB time by endpoint. + * A limit to the size of request which will be accepted It also provides a method `processing`, which returns a context manager. 
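Returning to the `MAX_RESPONSE_SIZE` and `BinaryIOWrapper` changes above: the federation response body is streamed in byte chunks, decoded incrementally into a text buffer, and only then JSON-parsed, with a hard cap on the raw size. A rough standalone sketch of that approach (the helper name is illustrative, not Synapse's API):

import codecs
import json
from io import StringIO
from typing import Iterable


def read_limited_json(chunks: Iterable[bytes], max_size: int):
    """Decode byte chunks into a StringIO and JSON-parse the result.

    Raises ValueError if the raw body exceeds max_size bytes.
    """
    decoder = codecs.getincrementaldecoder("utf-8")("strict")
    buf = StringIO()
    received = 0
    for chunk in chunks:
        received += len(chunk)
        if received > max_size:
            raise ValueError("response exceeded %d bytes" % max_size)
        buf.write(decoder.decode(chunk))
    # Flush any trailing partial character.
    buf.write(decoder.decode(b"", final=True))
    return json.loads(buf.getvalue())


# A multi-byte character split across chunk boundaries still decodes correctly:
print(read_limited_json([b'{"name": "caf', b'\xc3\xa9"}'], max_size=1024))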
If this method is called, the request won't be logged until the context manager is closed; @@ -59,8 +61,9 @@ class SynapseRequest(Request): logcontext: the log context for this request """ - def __init__(self, channel, *args, **kw): + def __init__(self, channel, *args, max_request_body_size=1024, **kw): Request.__init__(self, channel, *args, **kw) + self._max_request_body_size = max_request_body_size self.site = channel.site # type: SynapseSite self._channel = channel # this is used by the tests self.start_time = 0.0 @@ -97,6 +100,18 @@ class SynapseRequest(Request): self.site.site_tag, ) + def handleContentChunk(self, data): + # we should have a `content` by now. + assert self.content, "handleContentChunk() called before gotLength()" + if self.content.tell() + len(data) > self._max_request_body_size: + logger.warning( + "Aborting connection from %s because the request exceeds maximum size", + self.client, + ) + self.transport.abortConnection() + return + super().handleContentChunk(data) + @property def requester(self) -> Optional[Union[Requester, str]]: return self._requester @@ -485,29 +500,55 @@ class _XForwardedForAddress: class SynapseSite(Site): """ - Subclass of a twisted http Site that does access logging with python's - standard logging + Synapse-specific twisted http Site + + This does two main things. + + First, it replaces the requestFactory in use so that we build SynapseRequests + instead of regular t.w.server.Requests. All of the constructor params are really + just parameters for SynapseRequest. + + Second, it inhibits the log() method called by Request.finish, since SynapseRequest + does its own logging. """ def __init__( self, - logger_name, - site_tag, + logger_name: str, + site_tag: str, config: ListenerConfig, - resource, + resource: IResource, server_version_string, - *args, - **kwargs, + max_request_body_size: int, + reactor: IReactorTime, ): - Site.__init__(self, resource, *args, **kwargs) + """ + + Args: + logger_name: The name of the logger to use for access logs. + site_tag: A tag to use for this site - mostly in access logs. + config: Configuration for the HTTP listener corresponding to this site + resource: The base of the resource tree to be used for serving requests on + this site + server_version_string: A string to present for the Server header + max_request_body_size: Maximum request body length to allow before + dropping the connection + reactor: reactor to be used to manage connection timeouts + """ + Site.__init__(self, resource, reactor=reactor) self.site_tag = site_tag assert config.http_options is not None proxied = config.http_options.x_forwarded - self.requestFactory = ( - XForwardedForRequest if proxied else SynapseRequest - ) # type: Type[Request] + request_class = XForwardedForRequest if proxied else SynapseRequest + + def request_factory(channel, queued) -> Request: + return request_class( + channel, max_request_body_size=max_request_body_size, queued=queued + ) + + self.requestFactory = request_factory # type: ignore self.access_logger = logging.getLogger(logger_name) self.server_version_string = server_version_string.encode("ascii") diff --git a/synapse/logging/__init__.py b/synapse/logging/__init__.py index b28b7b2ef7..e00969f8b1 100644 --- a/synapse/logging/__init__.py +++ b/synapse/logging/__init__.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2020 The Matrix.org Foundation C.I.C. 
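The `handleContentChunk` override above enforces the new `max_request_body_size` by checking the current write position before accepting each chunk. Outside of Twisted, the same guard can be sketched roughly as follows (a toy class, not Synapse's request class; it raises instead of aborting the TCP connection as the real code does):

from io import BytesIO


class BoundedBodyBuffer:
    """Accumulates request body chunks, refusing to grow past a limit."""

    def __init__(self, max_body_size: int) -> None:
        self._max_body_size = max_body_size
        self._content = BytesIO()

    def handle_chunk(self, data: bytes) -> None:
        # Mirrors the check above: current position plus incoming chunk length.
        if self._content.tell() + len(data) > self._max_body_size:
            raise ValueError("request body exceeds maximum size")
        self._content.write(data)


buf = BoundedBodyBuffer(max_body_size=10)
buf.handle_chunk(b"12345")
buf.handle_chunk(b"12345")        # exactly at the limit: accepted
try:
    buf.handle_chunk(b"x")        # one byte over: rejected
except ValueError as e:
    print(e)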
# # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/logging/_remote.py b/synapse/logging/_remote.py index 643492ceaf..c515690b38 100644 --- a/synapse/logging/_remote.py +++ b/synapse/logging/_remote.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2020 The Matrix.org Foundation C.I.C. # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -227,11 +226,11 @@ class RemoteHandler(logging.Handler): old_buffer = self._buffer self._buffer = deque() - for i in range(buffer_split): + for _ in range(buffer_split): self._buffer.append(old_buffer.popleft()) end_buffer = [] - for i in range(buffer_split): + for _ in range(buffer_split): end_buffer.append(old_buffer.pop()) self._buffer.extend(reversed(end_buffer)) diff --git a/synapse/logging/_structured.py b/synapse/logging/_structured.py index 3e054f615c..c7a971a9d6 100644 --- a/synapse/logging/_structured.py +++ b/synapse/logging/_structured.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2019 The Matrix.org Foundation C.I.C. # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/logging/_terse_json.py b/synapse/logging/_terse_json.py index 2fbf5549a1..8002a250a2 100644 --- a/synapse/logging/_terse_json.py +++ b/synapse/logging/_terse_json.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2019 The Matrix.org Foundation C.I.C. # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/logging/filter.py b/synapse/logging/filter.py index 1baf8dd679..ed51a4726c 100644 --- a/synapse/logging/filter.py +++ b/synapse/logging/filter.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2020 The Matrix.org Foundation C.I.C. # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/logging/formatter.py b/synapse/logging/formatter.py index 11f60a77f7..c0f12ecd15 100644 --- a/synapse/logging/formatter.py +++ b/synapse/logging/formatter.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2017 New Vector Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/logging/opentracing.py b/synapse/logging/opentracing.py index bfe9136fd8..fba2fa3904 100644 --- a/synapse/logging/opentracing.py +++ b/synapse/logging/opentracing.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2019 The Matrix.org Foundation C.I.C. # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/logging/scopecontextmanager.py b/synapse/logging/scopecontextmanager.py index 7b9c657456..b1e8e08fe9 100644 --- a/synapse/logging/scopecontextmanager.py +++ b/synapse/logging/scopecontextmanager.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2019 The Matrix.org Foundation C.I.C. 
# # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/logging/utils.py b/synapse/logging/utils.py index fd3543ab04..08895e72ee 100644 --- a/synapse/logging/utils.py +++ b/synapse/logging/utils.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2014-2016 OpenMarket Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/metrics/__init__.py b/synapse/metrics/__init__.py index 13a5bc4558..31b7b3c256 100644 --- a/synapse/metrics/__init__.py +++ b/synapse/metrics/__init__.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2015, 2016 OpenMarket Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/metrics/_exposition.py b/synapse/metrics/_exposition.py index 71320a1402..8002be56e0 100644 --- a/synapse/metrics/_exposition.py +++ b/synapse/metrics/_exposition.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2015-2019 Prometheus Python Client Developers # Copyright 2019 Matrix.org Foundation C.I.C. # diff --git a/synapse/metrics/background_process_metrics.py b/synapse/metrics/background_process_metrics.py index 3f621539f3..714caf84c3 100644 --- a/synapse/metrics/background_process_metrics.py +++ b/synapse/metrics/background_process_metrics.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2018 New Vector Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/module_api/__init__.py b/synapse/module_api/__init__.py index 189f7e52ba..12c5ea0815 100644 --- a/synapse/module_api/__init__.py +++ b/synapse/module_api/__init__.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2017 New Vector Ltd # Copyright 2020 The Matrix.org Foundation C.I.C. # @@ -51,6 +50,7 @@ class ModuleApi: self._auth_handler = auth_handler self._server_name = hs.hostname self._presence_stream = hs.get_event_sources().sources["presence"] + self._state = hs.get_state_handler() # We expose these as properties below in order to attach a helpful docstring. self._http_client = hs.get_simple_http_client() # type: SimpleHttpClient @@ -430,11 +430,13 @@ class ModuleApi: UserID.from_string(user), from_key=None, include_offline=False ) - # Send to remote destinations - await make_deferred_yieldable( - # We pull the federation sender here as we can only do so on workers - # that support sending presence - self._hs.get_federation_sender().send_presence(presence_events) + # Send to remote destinations. + + # We pull out the presence handler here to break a cyclic + # dependency between the presence router and module API. + presence_handler = self._hs.get_presence_handler() + await presence_handler.maybe_send_presence_to_interested_destinations( + presence_events ) diff --git a/synapse/module_api/errors.py b/synapse/module_api/errors.py index b15441772c..d24864c549 100644 --- a/synapse/module_api/errors.py +++ b/synapse/module_api/errors.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2020 The Matrix.org Foundation C.I.C. 
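The `ModuleApi` change above looks the presence handler up at call time rather than holding onto the federation sender, which is what breaks the cycle between the presence router and the module API. A loose illustration of that shape, using toy stand-ins rather than Synapse's real classes:

import asyncio


class PresenceHandler:
    async def maybe_send_presence_to_interested_destinations(self, events):
        print("forwarding %d presence update(s)" % len(events))


class HomeServerStub:
    """Toy stand-in for the HomeServer: hands out singletons on request."""

    def __init__(self) -> None:
        self._presence_handler = None

    def get_presence_handler(self) -> PresenceHandler:
        if self._presence_handler is None:
            self._presence_handler = PresenceHandler()
        return self._presence_handler


class ModuleApi:
    def __init__(self, hs: HomeServerStub) -> None:
        # Keep only the homeserver reference; the presence handler is looked
        # up when it is actually needed, so building ModuleApi never forces
        # the presence machinery (and its dependencies) to exist first.
        self._hs = hs

    async def send_local_online_presence_to(self, presence_events) -> None:
        presence_handler = self._hs.get_presence_handler()
        await presence_handler.maybe_send_presence_to_interested_destinations(
            presence_events
        )


asyncio.run(ModuleApi(HomeServerStub()).send_local_online_presence_to(["update"]))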
# # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/notifier.py b/synapse/notifier.py index 7ce34380af..b9531007e2 100644 --- a/synapse/notifier.py +++ b/synapse/notifier.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2014 - 2016 OpenMarket Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -18,6 +17,7 @@ from collections import namedtuple from typing import ( Awaitable, Callable, + Collection, Dict, Iterable, List, @@ -43,13 +43,7 @@ from synapse.logging.opentracing import log_kv, start_active_span from synapse.logging.utils import log_function from synapse.metrics import LaterGauge from synapse.streams.config import PaginationConfig -from synapse.types import ( - Collection, - PersistedEventPosition, - RoomStreamToken, - StreamToken, - UserID, -) +from synapse.types import PersistedEventPosition, RoomStreamToken, StreamToken, UserID from synapse.util.async_helpers import ObservableDeferred, timeout_deferred from synapse.util.metrics import Measure from synapse.visibility import filter_events_for_client diff --git a/synapse/push/__init__.py b/synapse/push/__init__.py index 9fc3da49a2..2c23afe8e3 100644 --- a/synapse/push/__init__.py +++ b/synapse/push/__init__.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2015, 2016 OpenMarket Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/push/action_generator.py b/synapse/push/action_generator.py index 38a47a600f..60758df016 100644 --- a/synapse/push/action_generator.py +++ b/synapse/push/action_generator.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2015 OpenMarket Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/push/bulk_push_rule_evaluator.py b/synapse/push/bulk_push_rule_evaluator.py index 1897f59153..350646f458 100644 --- a/synapse/push/bulk_push_rule_evaluator.py +++ b/synapse/push/bulk_push_rule_evaluator.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2015 OpenMarket Ltd # Copyright 2017 New Vector Ltd # @@ -107,6 +106,10 @@ class BulkPushRuleEvaluator: self.store = hs.get_datastore() self.auth = hs.get_auth() + # Used by `RulesForRoom` to ensure only one thing mutates the cache at a + # time. Keyed off room_id. 
+ self._rules_linearizer = Linearizer(name="rules_for_room") + self.room_push_rule_cache_metrics = register_cache( "cache", "room_push_rule_cache", @@ -124,7 +127,16 @@ class BulkPushRuleEvaluator: dict of user_id -> push_rules """ room_id = event.room_id - rules_for_room = self._get_rules_for_room(room_id) + + rules_for_room_data = self._get_rules_for_room(room_id) + rules_for_room = RulesForRoom( + hs=self.hs, + room_id=room_id, + rules_for_room_cache=self._get_rules_for_room.cache, + room_push_rule_cache_metrics=self.room_push_rule_cache_metrics, + linearizer=self._rules_linearizer, + cached_data=rules_for_room_data, + ) rules_by_user = await rules_for_room.get_rules(event, context) @@ -143,17 +155,12 @@ class BulkPushRuleEvaluator: return rules_by_user @lru_cache() - def _get_rules_for_room(self, room_id: str) -> "RulesForRoom": - """Get the current RulesForRoom object for the given room id""" - # It's important that RulesForRoom gets added to self._get_rules_for_room.cache + def _get_rules_for_room(self, room_id: str) -> "RulesForRoomData": + """Get the current RulesForRoomData object for the given room id""" + # It's important that the RulesForRoomData object gets added to self._get_rules_for_room.cache # before any lookup methods get called on it as otherwise there may be # a race if invalidate_all gets called (which assumes its in the cache) - return RulesForRoom( - self.hs, - room_id, - self._get_rules_for_room.cache, - self.room_push_rule_cache_metrics, - ) + return RulesForRoomData() async def _get_power_levels_and_sender_level( self, event: EventBase, context: EventContext @@ -283,11 +290,49 @@ def _condition_checker( return True +@attr.s(slots=True) +class RulesForRoomData: + """The data stored in the cache by `RulesForRoom`. + + We don't store `RulesForRoom` directly in the cache as we want our caches to + *only* include data, and not references to e.g. the data stores. + """ + + # event_id -> (user_id, state) + member_map = attr.ib(type=Dict[str, Tuple[str, str]], factory=dict) + # user_id -> rules + rules_by_user = attr.ib(type=Dict[str, List[Dict[str, dict]]], factory=dict) + + # The last state group we updated the caches for. If the state_group of + # a new event comes along, we know that we can just return the cached + # result. + # On invalidation of the rules themselves (if the user changes them), + # we invalidate everything and set state_group to `object()` + state_group = attr.ib(type=Union[object, int], factory=object) + + # A sequence number to keep track of when we're allowed to update the + # cache. We bump the sequence number when we invalidate the cache. If + # the sequence number changes while we're calculating stuff we should + # not update the cache with it. + sequence = attr.ib(type=int, default=0) + + # A cache of user_ids that we *know* aren't interesting, e.g. user_ids + # owned by AS's, or remote users, etc. (I.e. users we will never need to + # calculate push for) + # These never need to be invalidated as we will never set up push for + # them. + uninteresting_user_set = attr.ib(type=Set[str], factory=set) + + class RulesForRoom: """Caches push rules for users in a room. This efficiently handles users joining/leaving the room by not invalidating the entire cache for the room. + + A new instance is constructed for each call to + `BulkPushRuleEvaluator._get_rules_for_event`, with the cached data from + previous calls passed in. 
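The docstring above captures the pattern behind this refactor: the long-lived cache holds a plain data object, and a short-lived worker object that references the data stores is rebuilt for every call and handed the cached data. A stripped-down sketch of that split (toy names, using attrs as the diff does):

import attr


@attr.s(slots=True)
class RulesCacheData:
    # Plain data only - safe to keep in a long-lived cache.
    rules_by_user = attr.ib(type=dict, factory=dict)
    state_group = attr.ib(type=object, factory=object)


class RulesLookup:
    """Rebuilt per call; holds the store reference and mutates the cached data."""

    def __init__(self, store, cached_data: RulesCacheData) -> None:
        self.store = store
        self.data = cached_data

    def get_rules(self, state_group):
        if state_group == self.data.state_group:
            return self.data.rules_by_user          # cache hit
        self.data.rules_by_user = self.store.load_rules()
        self.data.state_group = state_group
        return self.data.rules_by_user


class FakeStore:
    def load_rules(self):
        return {"@alice:example.com": ["rule"]}


cache = {}  # room_id -> RulesCacheData; never references FakeStore itself
data = cache.setdefault("!room:example.com", RulesCacheData())
print(RulesLookup(FakeStore(), data).get_rules(state_group=1))
print(RulesLookup(FakeStore(), data).get_rules(state_group=1))  # served from cache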
""" def __init__( @@ -296,6 +341,8 @@ class RulesForRoom: room_id: str, rules_for_room_cache: LruCache, room_push_rule_cache_metrics: CacheMetric, + linearizer: Linearizer, + cached_data: RulesForRoomData, ): """ Args: @@ -304,38 +351,21 @@ class RulesForRoom: rules_for_room_cache: The cache object that caches these RoomsForUser objects. room_push_rule_cache_metrics: The metrics object + linearizer: The linearizer used to ensure only one thing mutates + the cache at a time. Keyed off room_id + cached_data: Cached data from previous calls to `self.get_rules`, + can be mutated. """ self.room_id = room_id self.is_mine_id = hs.is_mine_id self.store = hs.get_datastore() self.room_push_rule_cache_metrics = room_push_rule_cache_metrics - self.linearizer = Linearizer(name="rules_for_room") - - # event_id -> (user_id, state) - self.member_map = {} # type: Dict[str, Tuple[str, str]] - # user_id -> rules - self.rules_by_user = {} # type: Dict[str, List[Dict[str, dict]]] - - # The last state group we updated the caches for. If the state_group of - # a new event comes along, we know that we can just return the cached - # result. - # On invalidation of the rules themselves (if the user changes them), - # we invalidate everything and set state_group to `object()` - self.state_group = object() - - # A sequence number to keep track of when we're allowed to update the - # cache. We bump the sequence number when we invalidate the cache. If - # the sequence number changes while we're calculating stuff we should - # not update the cache with it. - self.sequence = 0 - - # A cache of user_ids that we *know* aren't interesting, e.g. user_ids - # owned by AS's, or remote users, etc. (I.e. users we will never need to - # calculate push for) - # These never need to be invalidated as we will never set up push for - # them. - self.uninteresting_user_set = set() # type: Set[str] + # Used to ensure only one thing mutates the cache at a time. Keyed off + # room_id. + self.linearizer = linearizer + + self.data = cached_data # We need to be clever on the invalidating caches callbacks, as # otherwise the invalidation callback holds a reference to the object, @@ -353,25 +383,25 @@ class RulesForRoom: """ state_group = context.state_group - if state_group and self.state_group == state_group: + if state_group and self.data.state_group == state_group: logger.debug("Using cached rules for %r", self.room_id) self.room_push_rule_cache_metrics.inc_hits() - return self.rules_by_user + return self.data.rules_by_user - with (await self.linearizer.queue(())): - if state_group and self.state_group == state_group: + with (await self.linearizer.queue(self.room_id)): + if state_group and self.data.state_group == state_group: logger.debug("Using cached rules for %r", self.room_id) self.room_push_rule_cache_metrics.inc_hits() - return self.rules_by_user + return self.data.rules_by_user self.room_push_rule_cache_metrics.inc_misses() ret_rules_by_user = {} missing_member_event_ids = {} - if state_group and self.state_group == context.prev_group: + if state_group and self.data.state_group == context.prev_group: # If we have a simple delta then we can reuse most of the previous # results. 
- ret_rules_by_user = self.rules_by_user + ret_rules_by_user = self.data.rules_by_user current_state_ids = context.delta_ids push_rules_delta_state_cache_metric.inc_hits() @@ -394,24 +424,24 @@ class RulesForRoom: if typ != EventTypes.Member: continue - if user_id in self.uninteresting_user_set: + if user_id in self.data.uninteresting_user_set: continue if not self.is_mine_id(user_id): - self.uninteresting_user_set.add(user_id) + self.data.uninteresting_user_set.add(user_id) continue if self.store.get_if_app_services_interested_in_user(user_id): - self.uninteresting_user_set.add(user_id) + self.data.uninteresting_user_set.add(user_id) continue event_id = current_state_ids[key] - res = self.member_map.get(event_id, None) + res = self.data.member_map.get(event_id, None) if res: user_id, state = res if state == Membership.JOIN: - rules = self.rules_by_user.get(user_id, None) + rules = self.data.rules_by_user.get(user_id, None) if rules: ret_rules_by_user[user_id] = rules continue @@ -431,7 +461,7 @@ class RulesForRoom: else: # The push rules didn't change but lets update the cache anyway self.update_cache( - self.sequence, + self.data.sequence, members={}, # There were no membership changes rules_by_user=ret_rules_by_user, state_group=state_group, @@ -462,7 +492,7 @@ class RulesForRoom: for. Used when updating the cache. event: The event we are currently computing push rules for. """ - sequence = self.sequence + sequence = self.data.sequence rows = await self.store.get_membership_from_event_ids(member_event_ids.values()) @@ -502,23 +532,11 @@ class RulesForRoom: self.update_cache(sequence, members, ret_rules_by_user, state_group) - def invalidate_all(self) -> None: - # Note: Don't hand this function directly to an invalidation callback - # as it keeps a reference to self and will stop this instance from being - # GC'd if it gets dropped from the rules_to_user cache. 
Instead use - # `self.invalidate_all_cb` - logger.debug("Invalidating RulesForRoom for %r", self.room_id) - self.sequence += 1 - self.state_group = object() - self.member_map = {} - self.rules_by_user = {} - push_rules_invalidation_counter.inc() - def update_cache(self, sequence, members, rules_by_user, state_group) -> None: - if sequence == self.sequence: - self.member_map.update(members) - self.rules_by_user = rules_by_user - self.state_group = state_group + if sequence == self.data.sequence: + self.data.member_map.update(members) + self.data.rules_by_user = rules_by_user + self.data.state_group = state_group @attr.attrs(slots=True, frozen=True) @@ -536,6 +554,10 @@ class _Invalidation: room_id = attr.ib(type=str) def __call__(self) -> None: - rules = self.cache.get(self.room_id, None, update_metrics=False) - if rules: - rules.invalidate_all() + rules_data = self.cache.get(self.room_id, None, update_metrics=False) + if rules_data: + rules_data.sequence += 1 + rules_data.state_group = object() + rules_data.member_map = {} + rules_data.rules_by_user = {} + push_rules_invalidation_counter.inc() diff --git a/synapse/push/clientformat.py b/synapse/push/clientformat.py index 0cadba761a..2ee0ccd58a 100644 --- a/synapse/push/clientformat.py +++ b/synapse/push/clientformat.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2016 OpenMarket Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/push/emailpusher.py b/synapse/push/emailpusher.py index c0968dc7a1..99a18874d1 100644 --- a/synapse/push/emailpusher.py +++ b/synapse/push/emailpusher.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2016 OpenMarket Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -20,8 +19,9 @@ from twisted.internet.error import AlreadyCalled, AlreadyCancelled from twisted.internet.interfaces import IDelayedCall from synapse.metrics.background_process_metrics import run_as_background_process -from synapse.push import Pusher, PusherConfig, ThrottleParams +from synapse.push import Pusher, PusherConfig, PusherConfigException, ThrottleParams from synapse.push.mailer import Mailer +from synapse.util.threepids import validate_email if TYPE_CHECKING: from synapse.server import HomeServer @@ -72,6 +72,12 @@ class EmailPusher(Pusher): self._is_processing = False + # Make sure that the email is valid. + try: + validate_email(self.email) + except ValueError: + raise PusherConfigException("Invalid email") + def on_started(self, should_check_for_notifs: bool) -> None: """Called when this pusher has been started. 
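The `EmailPusher` change above validates the pusher's email address up front and converts a `ValueError` into a `PusherConfigException`, so a badly configured pusher is rejected when it is built rather than failing later. A rough standalone sketch of that pattern (the validator below is a placeholder, not `synapse.util.threepids.validate_email`):

class PusherConfigException(Exception):
    """Raised when a pusher is configured with invalid data."""


def validate_email(address: str) -> None:
    # Placeholder check; the real validator applies stricter rules.
    if "@" not in address or len(address) > 255:
        raise ValueError("%s is not a valid email address" % address)


class EmailPusher:
    def __init__(self, email: str) -> None:
        self.email = email
        # Fail fast: surface bad configuration at construction time.
        try:
            validate_email(self.email)
        except ValueError:
            raise PusherConfigException("Invalid email")


EmailPusher("alice@example.com")          # fine
try:
    EmailPusher("not-an-email")
except PusherConfigException as e:
    print(e)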
diff --git a/synapse/push/httppusher.py b/synapse/push/httppusher.py index 26af5309c1..06bf5f8ada 100644 --- a/synapse/push/httppusher.py +++ b/synapse/push/httppusher.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2015, 2016 OpenMarket Ltd # Copyright 2017 New Vector Ltd # diff --git a/synapse/push/mailer.py b/synapse/push/mailer.py index 2e5161de2c..c4b43b0d3f 100644 --- a/synapse/push/mailer.py +++ b/synapse/push/mailer.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2016 OpenMarket Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/push/presentable_names.py b/synapse/push/presentable_names.py index 04c2c1482c..412941393f 100644 --- a/synapse/push/presentable_names.py +++ b/synapse/push/presentable_names.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2016 OpenMarket Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/push/push_rule_evaluator.py b/synapse/push/push_rule_evaluator.py index ba1877adcd..49ecb38522 100644 --- a/synapse/push/push_rule_evaluator.py +++ b/synapse/push/push_rule_evaluator.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2015, 2016 OpenMarket Ltd # Copyright 2017 New Vector Ltd # diff --git a/synapse/push/push_tools.py b/synapse/push/push_tools.py index df34103224..9c85200c0f 100644 --- a/synapse/push/push_tools.py +++ b/synapse/push/push_tools.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2015, 2016 OpenMarket Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/push/pusher.py b/synapse/push/pusher.py index cb94127850..c51938b8cf 100644 --- a/synapse/push/pusher.py +++ b/synapse/push/pusher.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2014-2016 OpenMarket Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/push/pusherpool.py b/synapse/push/pusherpool.py index 2534ecb1d4..579fcdf472 100644 --- a/synapse/push/pusherpool.py +++ b/synapse/push/pusherpool.py @@ -1,5 +1,4 @@ #!/usr/bin/env python -# -*- coding: utf-8 -*- # Copyright 2015, 2016 OpenMarket Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -63,7 +62,9 @@ class PusherPool: self.store = self.hs.get_datastore() self.clock = self.hs.get_clock() - self._account_validity_enabled = hs.config.account_validity_enabled + self._account_validity_enabled = ( + hs.config.account_validity.account_validity_enabled + ) # We shard the handling of push notifications by user ID. self._pusher_shard_config = hs.config.push.pusher_shard_config diff --git a/synapse/python_dependencies.py b/synapse/python_dependencies.py index 2a1c925ee8..2de946f464 100644 --- a/synapse/python_dependencies.py +++ b/synapse/python_dependencies.py @@ -85,7 +85,7 @@ REQUIREMENTS = [ "typing-extensions>=3.7.4", # We enforce that we have a `cryptography` version that bundles an `openssl` # with the latest security patches. - "cryptography>=3.4.7;python_version>='3.6'", + "cryptography>=3.4.7", ] CONDITIONAL_REQUIREMENTS = { @@ -100,14 +100,9 @@ CONDITIONAL_REQUIREMENTS = { # that use the protocol, such as Let's Encrypt. "acme": [ "txacme>=0.9.2", - # txacme depends on eliot. 
Eliot 1.8.0 is incompatible with - # python 3.5.2, as per https://github.com/itamarst/eliot/issues/418 - "eliot<1.8.0;python_version<'3.5.3'", ], "saml2": [ - # pysaml2 6.4.0 is incompatible with Python 3.5 (see https://github.com/IdentityPython/pysaml2/issues/749) - "pysaml2>=4.5.0,<6.4.0;python_version<'3.6'", - "pysaml2>=4.5.0;python_version>='3.6'", + "pysaml2>=4.5.0", ], "oidc": ["authlib>=0.14.0"], # systemd-python is necessary for logging to the systemd journal via diff --git a/synapse/replication/__init__.py b/synapse/replication/__init__.py index b7df13c9ee..f43a360a80 100644 --- a/synapse/replication/__init__.py +++ b/synapse/replication/__init__.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2016 OpenMarket Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/replication/http/__init__.py b/synapse/replication/http/__init__.py index cb4a52dbe9..ba8114ac9e 100644 --- a/synapse/replication/http/__init__.py +++ b/synapse/replication/http/__init__.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2018 New Vector Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/replication/http/_base.py b/synapse/replication/http/_base.py index b7aa0c280f..5685cf2121 100644 --- a/synapse/replication/http/_base.py +++ b/synapse/replication/http/_base.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2018 New Vector Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -159,7 +158,10 @@ class ReplicationEndpoint(metaclass=abc.ABCMeta): def make_client(cls, hs): """Create a client that makes requests. - Returns a callable that accepts the same parameters as `_serialize_payload`. + Returns a callable that accepts the same parameters as + `_serialize_payload`, and also accepts an optional `instance_name` + parameter to specify which instance to hit (the instance must be in + the `instance_map` config). """ clock = hs.get_clock() client = hs.get_simple_http_client() diff --git a/synapse/replication/http/account_data.py b/synapse/replication/http/account_data.py index 60899b6ad6..70e951af63 100644 --- a/synapse/replication/http/account_data.py +++ b/synapse/replication/http/account_data.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2021 The Matrix.org Foundation C.I.C. 
# # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/replication/http/devices.py b/synapse/replication/http/devices.py index 807b85d2e1..5a5818ef61 100644 --- a/synapse/replication/http/devices.py +++ b/synapse/replication/http/devices.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2018 New Vector Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/replication/http/federation.py b/synapse/replication/http/federation.py index 82ea3b895f..79cadb7b57 100644 --- a/synapse/replication/http/federation.py +++ b/synapse/replication/http/federation.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2018 New Vector Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/replication/http/login.py b/synapse/replication/http/login.py index 4ec1bfa6ea..c2e8c00293 100644 --- a/synapse/replication/http/login.py +++ b/synapse/replication/http/login.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2019 New Vector Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/replication/http/membership.py b/synapse/replication/http/membership.py index 2812ac12fc..bd03030b4b 100644 --- a/synapse/replication/http/membership.py +++ b/synapse/replication/http/membership.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2018 New Vector Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/replication/http/presence.py b/synapse/replication/http/presence.py index bc9aa82cb4..f25307620d 100644 --- a/synapse/replication/http/presence.py +++ b/synapse/replication/http/presence.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2020 The Matrix.org Foundation C.I.C. # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/replication/http/push.py b/synapse/replication/http/push.py index 054ed64d34..139427cb1f 100644 --- a/synapse/replication/http/push.py +++ b/synapse/replication/http/push.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2021 The Matrix.org Foundation C.I.C. # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/replication/http/register.py b/synapse/replication/http/register.py index 73d7477854..d6dd7242eb 100644 --- a/synapse/replication/http/register.py +++ b/synapse/replication/http/register.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2019 New Vector Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/replication/http/send_event.py b/synapse/replication/http/send_event.py index a4c5b44292..fae5ffa451 100644 --- a/synapse/replication/http/send_event.py +++ b/synapse/replication/http/send_event.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2018 New Vector Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/replication/http/streams.py b/synapse/replication/http/streams.py index 309159e304..9afa147d00 100644 --- a/synapse/replication/http/streams.py +++ b/synapse/replication/http/streams.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2020 The Matrix.org Foundation C.I.C. 
# # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/replication/slave/__init__.py b/synapse/replication/slave/__init__.py index b7df13c9ee..f43a360a80 100644 --- a/synapse/replication/slave/__init__.py +++ b/synapse/replication/slave/__init__.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2016 OpenMarket Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/replication/slave/storage/__init__.py b/synapse/replication/slave/storage/__init__.py index b7df13c9ee..f43a360a80 100644 --- a/synapse/replication/slave/storage/__init__.py +++ b/synapse/replication/slave/storage/__init__.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2016 OpenMarket Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/replication/slave/storage/_base.py b/synapse/replication/slave/storage/_base.py index 693c9ab901..faa99387a7 100644 --- a/synapse/replication/slave/storage/_base.py +++ b/synapse/replication/slave/storage/_base.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2016 OpenMarket Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/replication/slave/storage/_slaved_id_tracker.py b/synapse/replication/slave/storage/_slaved_id_tracker.py index 0d39a93ed2..2cb7489047 100644 --- a/synapse/replication/slave/storage/_slaved_id_tracker.py +++ b/synapse/replication/slave/storage/_slaved_id_tracker.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2016 OpenMarket Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/replication/slave/storage/account_data.py b/synapse/replication/slave/storage/account_data.py index 21afe5f155..ee74ee7d85 100644 --- a/synapse/replication/slave/storage/account_data.py +++ b/synapse/replication/slave/storage/account_data.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2016 OpenMarket Ltd # Copyright 2018 New Vector Ltd # diff --git a/synapse/replication/slave/storage/appservice.py b/synapse/replication/slave/storage/appservice.py index 0f8d7037bd..29f50c0add 100644 --- a/synapse/replication/slave/storage/appservice.py +++ b/synapse/replication/slave/storage/appservice.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2015, 2016 OpenMarket Ltd # Copyright 2018 New Vector Ltd # diff --git a/synapse/replication/slave/storage/client_ips.py b/synapse/replication/slave/storage/client_ips.py index 0f5b7adef7..8730966380 100644 --- a/synapse/replication/slave/storage/client_ips.py +++ b/synapse/replication/slave/storage/client_ips.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2017 Vector Creations Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/replication/slave/storage/deviceinbox.py b/synapse/replication/slave/storage/deviceinbox.py index 1260f6d141..e940751084 100644 --- a/synapse/replication/slave/storage/deviceinbox.py +++ b/synapse/replication/slave/storage/deviceinbox.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2016 OpenMarket Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/replication/slave/storage/devices.py b/synapse/replication/slave/storage/devices.py index e0d86240dd..70207420a6 100644 --- a/synapse/replication/slave/storage/devices.py +++ b/synapse/replication/slave/storage/devices.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2016 OpenMarket Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git 
a/synapse/replication/slave/storage/directory.py b/synapse/replication/slave/storage/directory.py index 1945bcf9a8..71fde0c96c 100644 --- a/synapse/replication/slave/storage/directory.py +++ b/synapse/replication/slave/storage/directory.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2015, 2016 OpenMarket Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/replication/slave/storage/events.py b/synapse/replication/slave/storage/events.py index fbffe6d85c..d4d3f8c448 100644 --- a/synapse/replication/slave/storage/events.py +++ b/synapse/replication/slave/storage/events.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2016 OpenMarket Ltd # Copyright 2018 New Vector Ltd # diff --git a/synapse/replication/slave/storage/filtering.py b/synapse/replication/slave/storage/filtering.py index 6a23252861..37875bc973 100644 --- a/synapse/replication/slave/storage/filtering.py +++ b/synapse/replication/slave/storage/filtering.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2015, 2016 OpenMarket Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/replication/slave/storage/groups.py b/synapse/replication/slave/storage/groups.py index 30955bcbfe..e9bdc38470 100644 --- a/synapse/replication/slave/storage/groups.py +++ b/synapse/replication/slave/storage/groups.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2016 OpenMarket Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/replication/slave/storage/keys.py b/synapse/replication/slave/storage/keys.py index 961579751c..a00b38c512 100644 --- a/synapse/replication/slave/storage/keys.py +++ b/synapse/replication/slave/storage/keys.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2015, 2016 OpenMarket Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/replication/slave/storage/presence.py b/synapse/replication/slave/storage/presence.py deleted file mode 100644 index 55620c03d8..0000000000 --- a/synapse/replication/slave/storage/presence.py +++ /dev/null @@ -1,51 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2016 OpenMarket Ltd -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -from synapse.replication.tcp.streams import PresenceStream -from synapse.storage import DataStore -from synapse.storage.database import DatabasePool -from synapse.storage.databases.main.presence import PresenceStore -from synapse.util.caches.stream_change_cache import StreamChangeCache - -from ._base import BaseSlavedStore -from ._slaved_id_tracker import SlavedIdTracker - - -class SlavedPresenceStore(BaseSlavedStore): - def __init__(self, database: DatabasePool, db_conn, hs): - super().__init__(database, db_conn, hs) - self._presence_id_gen = SlavedIdTracker(db_conn, "presence_stream", "stream_id") - - self._presence_on_startup = self._get_active_presence(db_conn) # type: ignore - - self.presence_stream_cache = StreamChangeCache( - "PresenceStreamChangeCache", self._presence_id_gen.get_current_token() - ) - - _get_active_presence = DataStore._get_active_presence - take_presence_startup_info = DataStore.take_presence_startup_info - _get_presence_for_user = PresenceStore.__dict__["_get_presence_for_user"] - get_presence_for_users = PresenceStore.__dict__["get_presence_for_users"] - - def get_current_presence_token(self): - return self._presence_id_gen.get_current_token() - - def process_replication_rows(self, stream_name, instance_name, token, rows): - if stream_name == PresenceStream.NAME: - self._presence_id_gen.advance(instance_name, token) - for row in rows: - self.presence_stream_cache.entity_has_changed(row.user_id, token) - self._get_presence_for_user.invalidate((row.user_id,)) - return super().process_replication_rows(stream_name, instance_name, token, rows) diff --git a/synapse/replication/slave/storage/profile.py b/synapse/replication/slave/storage/profile.py index f85b20a071..99f4a22642 100644 --- a/synapse/replication/slave/storage/profile.py +++ b/synapse/replication/slave/storage/profile.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2018 New Vector Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/replication/slave/storage/push_rule.py b/synapse/replication/slave/storage/push_rule.py index de904c943c..4d5f862862 100644 --- a/synapse/replication/slave/storage/push_rule.py +++ b/synapse/replication/slave/storage/push_rule.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2015, 2016 OpenMarket Ltd # Copyright 2018 New Vector Ltd # diff --git a/synapse/replication/slave/storage/pushers.py b/synapse/replication/slave/storage/pushers.py index 93161c3dfb..2672a2c94b 100644 --- a/synapse/replication/slave/storage/pushers.py +++ b/synapse/replication/slave/storage/pushers.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2016 OpenMarket Ltd # Copyright 2018 New Vector Ltd # diff --git a/synapse/replication/slave/storage/receipts.py b/synapse/replication/slave/storage/receipts.py index 3dfdd9961d..3826b87dec 100644 --- a/synapse/replication/slave/storage/receipts.py +++ b/synapse/replication/slave/storage/receipts.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2016 OpenMarket Ltd # Copyright 2018 New Vector Ltd # diff --git a/synapse/replication/slave/storage/registration.py b/synapse/replication/slave/storage/registration.py index a40f064e2b..5dae35a960 100644 --- a/synapse/replication/slave/storage/registration.py +++ b/synapse/replication/slave/storage/registration.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2015, 2016 OpenMarket Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/replication/slave/storage/room.py 
b/synapse/replication/slave/storage/room.py index 109ac6bea1..8cc6de3f46 100644 --- a/synapse/replication/slave/storage/room.py +++ b/synapse/replication/slave/storage/room.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2015, 2016 OpenMarket Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/replication/slave/storage/transactions.py b/synapse/replication/slave/storage/transactions.py index 2091ac0df6..a59e543924 100644 --- a/synapse/replication/slave/storage/transactions.py +++ b/synapse/replication/slave/storage/transactions.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2015, 2016 OpenMarket Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/replication/tcp/__init__.py b/synapse/replication/tcp/__init__.py index 1b8718b11d..1fa60af8e6 100644 --- a/synapse/replication/tcp/__init__.py +++ b/synapse/replication/tcp/__init__.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2017 Vector Creations Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/replication/tcp/client.py b/synapse/replication/tcp/client.py index 3455839d67..4f3c6a18b6 100644 --- a/synapse/replication/tcp/client.py +++ b/synapse/replication/tcp/client.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2017 Vector Creations Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -15,22 +14,35 @@ """A replication client for use by synapse workers. """ import logging -from typing import TYPE_CHECKING, Dict, List, Tuple +from typing import TYPE_CHECKING, Dict, List, Optional, Set, Tuple from twisted.internet.defer import Deferred from twisted.internet.protocol import ReconnectingClientFactory from synapse.api.constants import EventTypes +from synapse.federation import send_queue +from synapse.federation.sender import FederationSender from synapse.logging.context import PreserveLoggingContext, make_deferred_yieldable +from synapse.metrics.background_process_metrics import run_as_background_process from synapse.replication.tcp.protocol import ClientReplicationStreamProtocol -from synapse.replication.tcp.streams import TypingStream +from synapse.replication.tcp.streams import ( + AccountDataStream, + DeviceListsStream, + GroupServerStream, + PushersStream, + PushRulesStream, + ReceiptsStream, + TagAccountDataStream, + ToDeviceStream, + TypingStream, +) from synapse.replication.tcp.streams.events import ( EventsStream, EventsStreamEventRow, EventsStreamRow, ) -from synapse.types import PersistedEventPosition, UserID -from synapse.util.async_helpers import timeout_deferred +from synapse.types import PersistedEventPosition, ReadReceipt, UserID +from synapse.util.async_helpers import Linearizer, timeout_deferred from synapse.util.metrics import Measure if TYPE_CHECKING: @@ -106,6 +118,14 @@ class ReplicationDataHandler: self._instance_name = hs.get_instance_name() self._typing_handler = hs.get_typing_handler() + self._notify_pushers = hs.config.start_pushers + self._pusher_pool = hs.get_pusherpool() + self._presence_handler = hs.get_presence_handler() + + self.send_handler = None # type: Optional[FederationSenderHandler] + if hs.should_send_federation(): + self.send_handler = FederationSenderHandler(hs) + # Map from stream to list of deferreds waiting for the stream to # arrive at a particular position. The lists are sorted by stream position. 
self._streams_to_waiters = {} # type: Dict[str, List[Tuple[int, Deferred]]] @@ -126,13 +146,51 @@ class ReplicationDataHandler: """ self.store.process_replication_rows(stream_name, instance_name, token, rows) + if self.send_handler: + await self.send_handler.process_replication_rows(stream_name, token, rows) + if stream_name == TypingStream.NAME: self._typing_handler.process_replication_rows(token, rows) self.notifier.on_new_event( "typing_key", token, rooms=[row.room_id for row in rows] ) - - if stream_name == EventsStream.NAME: + elif stream_name == PushRulesStream.NAME: + self.notifier.on_new_event( + "push_rules_key", token, users=[row.user_id for row in rows] + ) + elif stream_name in (AccountDataStream.NAME, TagAccountDataStream.NAME): + self.notifier.on_new_event( + "account_data_key", token, users=[row.user_id for row in rows] + ) + elif stream_name == ReceiptsStream.NAME: + self.notifier.on_new_event( + "receipt_key", token, rooms=[row.room_id for row in rows] + ) + await self._pusher_pool.on_new_receipts( + token, token, {row.room_id for row in rows} + ) + elif stream_name == ToDeviceStream.NAME: + entities = [row.entity for row in rows if row.entity.startswith("@")] + if entities: + self.notifier.on_new_event("to_device_key", token, users=entities) + elif stream_name == DeviceListsStream.NAME: + all_room_ids = set() # type: Set[str] + for row in rows: + if row.entity.startswith("@"): + room_ids = await self.store.get_rooms_for_user(row.entity) + all_room_ids.update(room_ids) + self.notifier.on_new_event("device_list_key", token, rooms=all_room_ids) + elif stream_name == GroupServerStream.NAME: + self.notifier.on_new_event( + "groups_key", token, users=[row.user_id for row in rows] + ) + elif stream_name == PushersStream.NAME: + for row in rows: + if row.deleted: + self.stop_pusher(row.user_id, row.app_id, row.pushkey) + else: + await self.start_pusher(row.user_id, row.app_id, row.pushkey) + elif stream_name == EventsStream.NAME: # We shouldn't get multiple rows per token for events stream, so # we don't need to optimise this for multiple rows. for row in rows: @@ -160,6 +218,10 @@ class ReplicationDataHandler: membership=row.data.membership, ) + await self._presence_handler.process_replication_rows( + stream_name, instance_name, token, rows + ) + # Notify any waiting deferreds. The list is ordered by position so we # just iterate through the list until we reach a position that is # greater than the received row position. @@ -191,7 +253,7 @@ class ReplicationDataHandler: waiting_list[:] = waiting_list[index_of_first_deferred_not_called:] async def on_position(self, stream_name: str, instance_name: str, token: int): - self.store.process_replication_rows(stream_name, instance_name, token, []) + await self.on_rdata(stream_name, instance_name, token, []) # We poke the generic "replication" notifier to wake anything up that # may be streaming. @@ -200,6 +262,11 @@ class ReplicationDataHandler: def on_remote_server_up(self, server: str): """Called when get a new REMOTE_SERVER_UP command.""" + # Let's wake up the transaction queue for the server in case we have + # pending stuff to send to it. 
+ if self.send_handler: + self.send_handler.wake_destination(server) + async def wait_for_stream_position( self, instance_name: str, stream_name: str, position: int ): @@ -236,3 +303,153 @@ class ReplicationDataHandler: logger.info( "Finished waiting for repl stream %r to reach %s", stream_name, position ) + + def stop_pusher(self, user_id, app_id, pushkey): + if not self._notify_pushers: + return + + key = "%s:%s" % (app_id, pushkey) + pushers_for_user = self._pusher_pool.pushers.get(user_id, {}) + pusher = pushers_for_user.pop(key, None) + if pusher is None: + return + logger.info("Stopping pusher %r / %r", user_id, key) + pusher.on_stop() + + async def start_pusher(self, user_id, app_id, pushkey): + if not self._notify_pushers: + return + + key = "%s:%s" % (app_id, pushkey) + logger.info("Starting pusher %r / %r", user_id, key) + return await self._pusher_pool.start_pusher_by_id(app_id, pushkey, user_id) + + +class FederationSenderHandler: + """Processes the fedration replication stream + + This class is only instantiate on the worker responsible for sending outbound + federation transactions. It receives rows from the replication stream and forwards + the appropriate entries to the FederationSender class. + """ + + def __init__(self, hs: "HomeServer"): + assert hs.should_send_federation() + + self.store = hs.get_datastore() + self._is_mine_id = hs.is_mine_id + self._hs = hs + + # We need to make a temporary value to ensure that mypy picks up the + # right type. We know we should have a federation sender instance since + # `should_send_federation` is True. + sender = hs.get_federation_sender() + assert isinstance(sender, FederationSender) + self.federation_sender = sender + + # Stores the latest position in the federation stream we've gotten up + # to. This is always set before we use it. + self.federation_position = None # type: Optional[int] + + self._fed_position_linearizer = Linearizer(name="_fed_position_linearizer") + + def wake_destination(self, server: str): + self.federation_sender.wake_destination(server) + + async def process_replication_rows(self, stream_name, token, rows): + # The federation stream contains things that we want to send out, e.g. + # presence, typing, etc. + if stream_name == "federation": + send_queue.process_rows_for_federation(self.federation_sender, rows) + await self.update_token(token) + + # ... and when new receipts happen + elif stream_name == ReceiptsStream.NAME: + await self._on_new_receipts(rows) + + # ... as well as device updates and messages + elif stream_name == DeviceListsStream.NAME: + # The entities are either user IDs (starting with '@') whose devices + # have changed, or remote servers that we need to tell about + # changes. + hosts = {row.entity for row in rows if not row.entity.startswith("@")} + for host in hosts: + self.federation_sender.send_device_messages(host) + + elif stream_name == ToDeviceStream.NAME: + # The to_device stream includes stuff to be pushed to both local + # clients and remote servers, so we ignore entities that start with + # '@' (since they'll be local users rather than destinations). 
+            hosts = {row.entity for row in rows if not row.entity.startswith("@")}
+            for host in hosts:
+                self.federation_sender.send_device_messages(host)
+
+    async def _on_new_receipts(self, rows):
+        """
+        Args:
+            rows (Iterable[synapse.replication.tcp.streams.ReceiptsStream.ReceiptsStreamRow]):
+                new receipts to be processed
+        """
+        for receipt in rows:
+            # we only want to send on receipts for our own users
+            if not self._is_mine_id(receipt.user_id):
+                continue
+            receipt_info = ReadReceipt(
+                receipt.room_id,
+                receipt.receipt_type,
+                receipt.user_id,
+                [receipt.event_id],
+                receipt.data,
+            )
+            await self.federation_sender.send_read_receipt(receipt_info)
+
+    async def update_token(self, token):
+        """Update the record of where we have processed to in the federation stream.
+
+        Called after we have processed an update received over replication. Sends
+        a FEDERATION_ACK back to the master, and stores the token that we have processed
+        in `federation_stream_position` so that we can restart where we left off.
+        """
+        self.federation_position = token
+
+        # We save and send the ACK to master asynchronously, so we don't block
+        # processing on persistence. We don't need to do this operation for
+        # every single RDATA we receive, we just need to do it periodically.
+
+        if self._fed_position_linearizer.is_queued(None):
+            # There is already a task queued up to save and send the token, so
+            # no need to queue up another task.
+            return
+
+        run_as_background_process("_save_and_send_ack", self._save_and_send_ack)
+
+    async def _save_and_send_ack(self):
+        """Save the current federation position in the database and send an ACK
+        to master with where we're up to.
+        """
+        # We should only be calling this once we've got a token.
+        assert self.federation_position is not None
+
+        try:
+            # We linearize here to ensure we don't have races updating the token
+            #
+            # XXX this appears to be redundant, since the ReplicationCommandHandler
+            # has a linearizer which ensures that we only process one line of
+            # replication data at a time. Should we remove it, or is it doing useful
+            # service for robustness? Or could we replace it with an assertion that
+            # we're not being re-entered?
+
+            with (await self._fed_position_linearizer.queue(None)):
+                # We persist and ack the same position, so we take a copy of it
+                # here as otherwise it can get modified from underneath us.
+                current_position = self.federation_position
+
+                await self.store.update_federation_out_pos(
+                    "federation", current_position
+                )
+
+                # We ACK this token over replication so that the master can drop
+                # its in-memory queues
+                self._hs.get_tcp_replication().send_federation_ack(current_position)
+        except Exception:
+            logger.exception("Error updating federation stream position")
diff --git a/synapse/replication/tcp/commands.py b/synapse/replication/tcp/commands.py
index 8abed1f52d..505d450e19 100644
--- a/synapse/replication/tcp/commands.py
+++ b/synapse/replication/tcp/commands.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
 # Copyright 2017 Vector Creations Ltd
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/replication/tcp/external_cache.py b/synapse/replication/tcp/external_cache.py
index d89a36f25a..1a3b051e3c 100644
--- a/synapse/replication/tcp/external_cache.py
+++ b/synapse/replication/tcp/external_cache.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
 # Copyright 2021 The Matrix.org Foundation C.I.C.
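# Editor's aside -- an illustrative sketch, not part of the patch above. update_token
# debounces acknowledgements: it records the newest position and only schedules a
# background "save and ack" when none is already queued, so a burst of RDATA
# collapses into a single flush of the latest token. The asyncio stand-in below is
# hypothetical and far simpler than Synapse's Linearizer / background processes.
import asyncio
from typing import List, Optional


class DebouncedAcker:
    def __init__(self) -> None:
        self.position: Optional[int] = None
        self.acked: List[int] = []
        self._flush_scheduled = False

    def on_new_position(self, token: int) -> None:
        self.position = token
        if self._flush_scheduled:
            # A flush is already pending; it will pick up the newest token.
            return
        self._flush_scheduled = True
        asyncio.create_task(self._flush())

    async def _flush(self) -> None:
        await asyncio.sleep(0)  # yield so further positions can accumulate
        self._flush_scheduled = False
        assert self.position is not None
        self.acked.append(self.position)  # "persist and ack" only the latest token


async def main() -> None:
    acker = DebouncedAcker()
    for token in (101, 102, 103):
        acker.on_new_position(token)
    await asyncio.sleep(0.01)
    print(acker.acked)  # [103]: three updates, one ack


asyncio.run(main())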
# # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/replication/tcp/handler.py b/synapse/replication/tcp/handler.py index a8894beadf..7ced4c543c 100644 --- a/synapse/replication/tcp/handler.py +++ b/synapse/replication/tcp/handler.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2017 Vector Creations Ltd # Copyright 2020 The Matrix.org Foundation C.I.C. # @@ -56,6 +55,8 @@ from synapse.replication.tcp.streams import ( CachesStream, EventsStream, FederationStream, + PresenceFederationStream, + PresenceStream, ReceiptsStream, Stream, TagAccountDataStream, @@ -100,6 +101,10 @@ class ReplicationCommandHandler: self._instance_id = hs.get_instance_id() self._instance_name = hs.get_instance_name() + self._is_presence_writer = ( + hs.get_instance_name() in hs.config.worker.writers.presence + ) + self._streams = { stream.NAME: stream(hs) for stream in STREAMS_MAP.values() } # type: Dict[str, Stream] @@ -154,6 +159,14 @@ class ReplicationCommandHandler: continue + if isinstance(stream, (PresenceStream, PresenceFederationStream)): + # Only add PresenceStream as a source on the instance in charge + # of presence. + if self._is_presence_writer: + self._streams_to_replicate.append(stream) + + continue + # Only add any other streams if we're on master. if hs.config.worker_app is not None: continue @@ -351,7 +364,7 @@ class ReplicationCommandHandler: ) -> Optional[Awaitable[None]]: user_sync_counter.inc() - if self._is_master: + if self._is_presence_writer: return self._presence_handler.update_external_syncs_row( cmd.instance_id, cmd.user_id, cmd.is_syncing, cmd.last_sync_ms ) @@ -361,7 +374,7 @@ class ReplicationCommandHandler: def on_CLEAR_USER_SYNC( self, conn: IReplicationConnection, cmd: ClearUserSyncsCommand ) -> Optional[Awaitable[None]]: - if self._is_master: + if self._is_presence_writer: return self._presence_handler.update_external_syncs_clear(cmd.instance_id) else: return None diff --git a/synapse/replication/tcp/protocol.py b/synapse/replication/tcp/protocol.py index d10d574246..6e3705364f 100644 --- a/synapse/replication/tcp/protocol.py +++ b/synapse/replication/tcp/protocol.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2017 Vector Creations Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -50,7 +49,7 @@ import fcntl import logging import struct from inspect import isawaitable -from typing import TYPE_CHECKING, List, Optional +from typing import TYPE_CHECKING, Collection, List, Optional from prometheus_client import Counter from zope.interface import Interface, implementer @@ -77,7 +76,6 @@ from synapse.replication.tcp.commands import ( ServerCommand, parse_command_from_line, ) -from synapse.types import Collection from synapse.util import Clock from synapse.util.stringutils import random_string diff --git a/synapse/replication/tcp/redis.py b/synapse/replication/tcp/redis.py index 98bdeb0ec6..6a2c2655e4 100644 --- a/synapse/replication/tcp/redis.py +++ b/synapse/replication/tcp/redis.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2020 The Matrix.org Foundation C.I.C. 
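# Editor's aside -- an illustrative sketch, not part of the patch above. The handler
# derives an "is presence writer" flag by checking whether this instance's name is
# listed under the presence writers in the worker config, and uses that flag both to
# decide whether to advertise the presence streams and whether to act on USER_SYNC
# commands. The config shape below is a hypothetical stand-in for illustration only.
from dataclasses import dataclass, field
from typing import List


@dataclass
class WriterConfig:
    presence: List[str] = field(default_factory=lambda: ["master"])


def presence_streams_to_replicate(instance_name: str, writers: WriterConfig) -> List[str]:
    is_presence_writer = instance_name in writers.presence
    if not is_presence_writer:
        # Only the presence writer acts as a source for the presence streams.
        return []
    return ["presence", "presence_federation"]


print(presence_streams_to_replicate("master", WriterConfig()))   # both presence streams
print(presence_streams_to_replicate("worker1", WriterConfig()))  # []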
# # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/replication/tcp/resource.py b/synapse/replication/tcp/resource.py index 2018f9f29e..bd47d84258 100644 --- a/synapse/replication/tcp/resource.py +++ b/synapse/replication/tcp/resource.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2017 Vector Creations Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/replication/tcp/streams/__init__.py b/synapse/replication/tcp/streams/__init__.py index d1a61c3314..4c0023c68a 100644 --- a/synapse/replication/tcp/streams/__init__.py +++ b/synapse/replication/tcp/streams/__init__.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2017 Vector Creations Ltd # Copyright 2019 New Vector Ltd # @@ -31,6 +30,7 @@ from synapse.replication.tcp.streams._base import ( CachesStream, DeviceListsStream, GroupServerStream, + PresenceFederationStream, PresenceStream, PublicRoomsStream, PushersStream, @@ -51,6 +51,7 @@ STREAMS_MAP = { EventsStream, BackfillStream, PresenceStream, + PresenceFederationStream, TypingStream, ReceiptsStream, PushRulesStream, @@ -72,6 +73,7 @@ __all__ = [ "Stream", "BackfillStream", "PresenceStream", + "PresenceFederationStream", "TypingStream", "ReceiptsStream", "PushRulesStream", diff --git a/synapse/replication/tcp/streams/_base.py b/synapse/replication/tcp/streams/_base.py index 3dfee76743..b03824925a 100644 --- a/synapse/replication/tcp/streams/_base.py +++ b/synapse/replication/tcp/streams/_base.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2017 Vector Creations Ltd # Copyright 2019 New Vector Ltd # @@ -273,15 +272,22 @@ class PresenceStream(Stream): NAME = "presence" ROW_TYPE = PresenceStreamRow - def __init__(self, hs): + def __init__(self, hs: "HomeServer"): store = hs.get_datastore() - if hs.config.worker_app is None: - # on the master, query the presence handler + if hs.get_instance_name() in hs.config.worker.writers.presence: + # on the presence writer, query the presence handler presence_handler = hs.get_presence_handler() - update_function = presence_handler.get_all_presence_updates + + from synapse.handlers.presence import PresenceHandler + + assert isinstance(presence_handler, PresenceHandler) + + update_function = ( + presence_handler.get_all_presence_updates + ) # type: UpdateFunction else: - # Query master process + # Query presence writer process update_function = make_http_update_function(hs, self.NAME) super().__init__( @@ -291,6 +297,30 @@ class PresenceStream(Stream): ) +class PresenceFederationStream(Stream): + """A stream used to send ad hoc presence updates over federation. + + Streams the remote destination and the user ID of the presence state to + send. 
+ """ + + @attr.s(slots=True, auto_attribs=True) + class PresenceFederationStreamRow: + destination: str + user_id: str + + NAME = "presence_federation" + ROW_TYPE = PresenceFederationStreamRow + + def __init__(self, hs: "HomeServer"): + federation_queue = hs.get_presence_handler().get_federation_queue() + super().__init__( + hs.get_instance_name(), + federation_queue.get_current_token, + federation_queue.get_replication_rows, + ) + + class TypingStream(Stream): TypingStreamRow = namedtuple( "TypingStreamRow", ("room_id", "user_ids") # str # list(str) diff --git a/synapse/replication/tcp/streams/events.py b/synapse/replication/tcp/streams/events.py index fa5e37ba7b..e7e87bac92 100644 --- a/synapse/replication/tcp/streams/events.py +++ b/synapse/replication/tcp/streams/events.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2017 Vector Creations Ltd # Copyright 2019 New Vector Ltd # diff --git a/synapse/replication/tcp/streams/federation.py b/synapse/replication/tcp/streams/federation.py index 9bb8e9e177..096a85d363 100644 --- a/synapse/replication/tcp/streams/federation.py +++ b/synapse/replication/tcp/streams/federation.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2017 Vector Creations Ltd # Copyright 2019 New Vector Ltd # diff --git a/synapse/rest/__init__.py b/synapse/rest/__init__.py index ee3a9af569..3591298150 100644 --- a/synapse/rest/__init__.py +++ b/synapse/rest/__init__.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2014-2016 OpenMarket Ltd # Copyright 2018 New Vector Ltd # diff --git a/synapse/rest/admin/__init__.py b/synapse/rest/admin/__init__.py index 2dec818a5f..9cb9a9f6aa 100644 --- a/synapse/rest/admin/__init__.py +++ b/synapse/rest/admin/__init__.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2014-2016 OpenMarket Ltd # Copyright 2018-2019 New Vector Ltd # Copyright 2020, 2021 The Matrix.org Foundation C.I.C. diff --git a/synapse/rest/admin/_base.py b/synapse/rest/admin/_base.py index 7681e55b58..f203f6fdc6 100644 --- a/synapse/rest/admin/_base.py +++ b/synapse/rest/admin/_base.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2019 New Vector Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/rest/admin/devices.py b/synapse/rest/admin/devices.py index 5996de11c3..5715190a78 100644 --- a/synapse/rest/admin/devices.py +++ b/synapse/rest/admin/devices.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2020 Dirk Klimpel # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/rest/admin/event_reports.py b/synapse/rest/admin/event_reports.py index 381c3fe685..bbfcaf723b 100644 --- a/synapse/rest/admin/event_reports.py +++ b/synapse/rest/admin/event_reports.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2020 Dirk Klimpel # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/rest/admin/groups.py b/synapse/rest/admin/groups.py index ebc587aa06..3b3ffde0b6 100644 --- a/synapse/rest/admin/groups.py +++ b/synapse/rest/admin/groups.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2019 The Matrix.org Foundation C.I.C. 
# # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/rest/admin/media.py b/synapse/rest/admin/media.py index 40646ef241..24dd46113a 100644 --- a/synapse/rest/admin/media.py +++ b/synapse/rest/admin/media.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2014-2016 OpenMarket Ltd # Copyright 2018-2019 New Vector Ltd # diff --git a/synapse/rest/admin/purge_room_servlet.py b/synapse/rest/admin/purge_room_servlet.py index 49966ee3e0..2365ff7a0f 100644 --- a/synapse/rest/admin/purge_room_servlet.py +++ b/synapse/rest/admin/purge_room_servlet.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2019 The Matrix.org Foundation C.I.C. # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/rest/admin/rooms.py b/synapse/rest/admin/rooms.py index cfe1bebb91..d0cf121743 100644 --- a/synapse/rest/admin/rooms.py +++ b/synapse/rest/admin/rooms.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2019-2021 The Matrix.org Foundation C.I.C. # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/rest/admin/server_notice_servlet.py b/synapse/rest/admin/server_notice_servlet.py index f495666f4a..cc3ab5854b 100644 --- a/synapse/rest/admin/server_notice_servlet.py +++ b/synapse/rest/admin/server_notice_servlet.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2019 New Vector Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/rest/admin/statistics.py b/synapse/rest/admin/statistics.py index f2490e382d..948de94ccd 100644 --- a/synapse/rest/admin/statistics.py +++ b/synapse/rest/admin/statistics.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2020 Dirk Klimpel # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/rest/admin/users.py b/synapse/rest/admin/users.py index 04990c71fb..8c9d21d3ea 100644 --- a/synapse/rest/admin/users.py +++ b/synapse/rest/admin/users.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2019 The Matrix.org Foundation C.I.C. 
# # Licensed under the Apache License, Version 2.0 (the "License"); @@ -15,6 +14,7 @@ import hashlib import hmac import logging +import secrets from http import HTTPStatus from typing import TYPE_CHECKING, Dict, List, Optional, Tuple @@ -376,7 +376,7 @@ class UserRegisterServlet(RestServlet): """ self._clear_old_nonces() - nonce = self.hs.get_secrets().token_hex(64) + nonce = secrets.token_hex(64) self.nonces[nonce] = int(self.reactor.seconds()) return 200, {"nonce": nonce} diff --git a/synapse/rest/client/__init__.py b/synapse/rest/client/__init__.py index fe0ac3f8e9..629e2df74a 100644 --- a/synapse/rest/client/__init__.py +++ b/synapse/rest/client/__init__.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2015, 2016 OpenMarket Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/rest/client/transactions.py b/synapse/rest/client/transactions.py index 7be5c0fb88..94ff3719ce 100644 --- a/synapse/rest/client/transactions.py +++ b/synapse/rest/client/transactions.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2014-2016 OpenMarket Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/rest/client/v1/__init__.py b/synapse/rest/client/v1/__init__.py index bfebb0f644..5e83dba2ed 100644 --- a/synapse/rest/client/v1/__init__.py +++ b/synapse/rest/client/v1/__init__.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2014-2016 OpenMarket Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/rest/client/v1/directory.py b/synapse/rest/client/v1/directory.py index e5af26b176..ae92a3df8e 100644 --- a/synapse/rest/client/v1/directory.py +++ b/synapse/rest/client/v1/directory.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2014-2016 OpenMarket Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/rest/client/v1/events.py b/synapse/rest/client/v1/events.py index 6de4078290..ee7454996e 100644 --- a/synapse/rest/client/v1/events.py +++ b/synapse/rest/client/v1/events.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2014-2016 OpenMarket Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/rest/client/v1/initial_sync.py b/synapse/rest/client/v1/initial_sync.py index 91da0ee573..bef1edc838 100644 --- a/synapse/rest/client/v1/initial_sync.py +++ b/synapse/rest/client/v1/initial_sync.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2014-2016 OpenMarket Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/rest/client/v1/login.py b/synapse/rest/client/v1/login.py index 3151e72d4f..42e709ec14 100644 --- a/synapse/rest/client/v1/login.py +++ b/synapse/rest/client/v1/login.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2014-2016 OpenMarket Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/rest/client/v1/logout.py b/synapse/rest/client/v1/logout.py index ad8cea49c6..5aa7908d73 100644 --- a/synapse/rest/client/v1/logout.py +++ b/synapse/rest/client/v1/logout.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2016 OpenMarket Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/rest/client/v1/presence.py b/synapse/rest/client/v1/presence.py index 94bfe2d1b0..9d2d020c25 100644 --- a/synapse/rest/client/v1/presence.py +++ b/synapse/rest/client/v1/presence.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2014-2016 OpenMarket Ltd # # Licensed under the 
Apache License, Version 2.0 (the "License"); @@ -36,10 +35,15 @@ class PresenceStatusRestServlet(RestServlet): self.clock = hs.get_clock() self.auth = hs.get_auth() + self._use_presence = hs.config.server.use_presence + async def on_GET(self, request, user_id): requester = await self.auth.get_user_by_req(request) user = UserID.from_string(user_id) + if not self._use_presence: + return 200, {"presence": "offline"} + if requester.user != user: allowed = await self.presence_handler.is_visible( observed_user=user, observer_user=requester.user @@ -79,7 +83,7 @@ class PresenceStatusRestServlet(RestServlet): except Exception: raise SynapseError(400, "Unable to parse state") - if self.hs.config.use_presence: + if self._use_presence: await self.presence_handler.set_state(user, state) return 200, {} diff --git a/synapse/rest/client/v1/profile.py b/synapse/rest/client/v1/profile.py index d77e20e135..0536faec9a 100644 --- a/synapse/rest/client/v1/profile.py +++ b/synapse/rest/client/v1/profile.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2014-2016 OpenMarket Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/rest/client/v1/push_rule.py b/synapse/rest/client/v1/push_rule.py index 241e535917..be29a0b39e 100644 --- a/synapse/rest/client/v1/push_rule.py +++ b/synapse/rest/client/v1/push_rule.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2014-2016 OpenMarket Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/rest/client/v1/pusher.py b/synapse/rest/client/v1/pusher.py index 0c148a213d..18102eca6c 100644 --- a/synapse/rest/client/v1/pusher.py +++ b/synapse/rest/client/v1/pusher.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2014-2016 OpenMarket Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/rest/client/v1/room.py b/synapse/rest/client/v1/room.py index d01b738e32..2d6b482be2 100644 --- a/synapse/rest/client/v1/room.py +++ b/synapse/rest/client/v1/room.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2014-2016 OpenMarket Ltd # Copyright 2018 New Vector Ltd # diff --git a/synapse/rest/client/v1/voip.py b/synapse/rest/client/v1/voip.py index d07ca2c47c..c780ffded5 100644 --- a/synapse/rest/client/v1/voip.py +++ b/synapse/rest/client/v1/voip.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2014-2016 OpenMarket Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/rest/client/v2_alpha/__init__.py b/synapse/rest/client/v2_alpha/__init__.py index bfebb0f644..5e83dba2ed 100644 --- a/synapse/rest/client/v2_alpha/__init__.py +++ b/synapse/rest/client/v2_alpha/__init__.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2014-2016 OpenMarket Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/rest/client/v2_alpha/_base.py b/synapse/rest/client/v2_alpha/_base.py index f016b4f1bd..0443f4571c 100644 --- a/synapse/rest/client/v2_alpha/_base.py +++ b/synapse/rest/client/v2_alpha/_base.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2014-2016 OpenMarket Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/rest/client/v2_alpha/account.py b/synapse/rest/client/v2_alpha/account.py index 3076706571..fb5b25ccdc 100644 --- a/synapse/rest/client/v2_alpha/account.py +++ b/synapse/rest/client/v2_alpha/account.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2015, 2016 OpenMarket Ltd # Copyright 2017 Vector Creations Ltd # 
Copyright 2018, 2019 New Vector Ltd @@ -42,7 +41,7 @@ from synapse.push.mailer import Mailer from synapse.types import UserID from synapse.util.msisdn import phone_number_to_msisdn from synapse.util.stringutils import assert_valid_client_secret, random_string -from synapse.util.threepids import canonicalise_email, check_3pid_allowed +from synapse.util.threepids import check_3pid_allowed, validate_email from ._base import client_patterns, interactive_auth_handler @@ -95,7 +94,7 @@ class EmailPasswordRequestTokenRestServlet(RestServlet): # Stored in the database "foo@bar.com" # User requests with "FOO@bar.com" would raise a Not Found error try: - email = canonicalise_email(body["email"]) + email = validate_email(body["email"]) except ValueError as e: raise SynapseError(400, str(e)) send_attempt = body["send_attempt"] @@ -256,7 +255,7 @@ class PasswordRestServlet(RestServlet): # We store all email addresses canonicalised in the DB. # (See add_threepid in synapse/handlers/auth.py) try: - threepid["address"] = canonicalise_email(threepid["address"]) + threepid["address"] = validate_email(threepid["address"]) except ValueError as e: raise SynapseError(400, str(e)) # if using email, we must know about the email they're authing with! @@ -404,7 +403,7 @@ class EmailThreepidRequestTokenRestServlet(RestServlet): # Otherwise the email will be sent to "FOO@bar.com" and stored as # "foo@bar.com" in database. try: - email = canonicalise_email(body["email"]) + email = validate_email(body["email"]) except ValueError as e: raise SynapseError(400, str(e)) send_attempt = body["send_attempt"] diff --git a/synapse/rest/client/v2_alpha/account_data.py b/synapse/rest/client/v2_alpha/account_data.py index c9f13e4ac5..6b038f5cc0 100644 --- a/synapse/rest/client/v2_alpha/account_data.py +++ b/synapse/rest/client/v2_alpha/account_data.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2015, 2016 OpenMarket Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/rest/client/v2_alpha/account_validity.py b/synapse/rest/client/v2_alpha/account_validity.py index 40c5bd4d8c..2d1ad3d3fb 100644 --- a/synapse/rest/client/v2_alpha/account_validity.py +++ b/synapse/rest/client/v2_alpha/account_validity.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2019 New Vector Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -38,12 +37,14 @@ class AccountValidityRenewServlet(RestServlet): self.account_activity_handler = hs.get_account_validity_handler() self.auth = hs.get_auth() self.account_renewed_template = ( - hs.config.account_validity_account_renewed_template + hs.config.account_validity.account_validity_account_renewed_template ) self.account_previously_renewed_template = ( - hs.config.account_validity_account_previously_renewed_template + hs.config.account_validity.account_validity_account_previously_renewed_template + ) + self.invalid_token_template = ( + hs.config.account_validity.account_validity_invalid_token_template ) - self.invalid_token_template = hs.config.account_validity_invalid_token_template async def on_GET(self, request): if b"token" not in request.args: @@ -87,7 +88,7 @@ class AccountValiditySendMailServlet(RestServlet): self.account_activity_handler = hs.get_account_validity_handler() self.auth = hs.get_auth() self.account_validity_renew_by_email_enabled = ( - self.hs.config.account_validity_renew_by_email_enabled + hs.config.account_validity.account_validity_renew_by_email_enabled ) async def on_POST(self, request): diff --git 
a/synapse/rest/client/v2_alpha/auth.py b/synapse/rest/client/v2_alpha/auth.py index 75ece1c911..6ea1b50a62 100644 --- a/synapse/rest/client/v2_alpha/auth.py +++ b/synapse/rest/client/v2_alpha/auth.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2015, 2016 OpenMarket Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/rest/client/v2_alpha/capabilities.py b/synapse/rest/client/v2_alpha/capabilities.py index 44ccf10ed4..6a24021484 100644 --- a/synapse/rest/client/v2_alpha/capabilities.py +++ b/synapse/rest/client/v2_alpha/capabilities.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2019 New Vector # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/rest/client/v2_alpha/devices.py b/synapse/rest/client/v2_alpha/devices.py index 3d07aadd39..9af05f9b11 100644 --- a/synapse/rest/client/v2_alpha/devices.py +++ b/synapse/rest/client/v2_alpha/devices.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2015, 2016 OpenMarket Ltd # Copyright 2020 The Matrix.org Foundation C.I.C. # diff --git a/synapse/rest/client/v2_alpha/filter.py b/synapse/rest/client/v2_alpha/filter.py index 7cc692643b..411667a9c8 100644 --- a/synapse/rest/client/v2_alpha/filter.py +++ b/synapse/rest/client/v2_alpha/filter.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2015, 2016 OpenMarket Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/rest/client/v2_alpha/groups.py b/synapse/rest/client/v2_alpha/groups.py index 08fb6b2b06..6285680c00 100644 --- a/synapse/rest/client/v2_alpha/groups.py +++ b/synapse/rest/client/v2_alpha/groups.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2017 Vector Creations Ltd # Copyright 2018 New Vector Ltd # diff --git a/synapse/rest/client/v2_alpha/keys.py b/synapse/rest/client/v2_alpha/keys.py index f092e5b3a2..a57ccbb5e5 100644 --- a/synapse/rest/client/v2_alpha/keys.py +++ b/synapse/rest/client/v2_alpha/keys.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2015, 2016 OpenMarket Ltd # Copyright 2019 New Vector Ltd # Copyright 2020 The Matrix.org Foundation C.I.C. diff --git a/synapse/rest/client/v2_alpha/notifications.py b/synapse/rest/client/v2_alpha/notifications.py index 87063ec8b1..0ede643c2d 100644 --- a/synapse/rest/client/v2_alpha/notifications.py +++ b/synapse/rest/client/v2_alpha/notifications.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2016 OpenMarket Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/rest/client/v2_alpha/openid.py b/synapse/rest/client/v2_alpha/openid.py index 5b996e2d63..d3322acc38 100644 --- a/synapse/rest/client/v2_alpha/openid.py +++ b/synapse/rest/client/v2_alpha/openid.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2015, 2016 OpenMarket Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/rest/client/v2_alpha/password_policy.py b/synapse/rest/client/v2_alpha/password_policy.py index 68b27ff23a..a83927aee6 100644 --- a/synapse/rest/client/v2_alpha/password_policy.py +++ b/synapse/rest/client/v2_alpha/password_policy.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2019 The Matrix.org Foundation C.I.C. 
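# Editor's aside -- an illustrative sketch, not part of the patch above. Every call
# site that switched from canonicalise_email to validate_email keeps the same shape:
# normalise/validate the submitted address and turn a ValueError into a 400 error for
# the client. The validator and error type below are hypothetical stand-ins, not the
# actual synapse.util.threepids helpers.
class RequestError(Exception):
    def __init__(self, code: int, msg: str) -> None:
        super().__init__(msg)
        self.code = code


def validate_email_address(address: str) -> str:
    # Stand-in check: require an "@", cap the length, lower-case the result.
    if "@" not in address or len(address) > 254:
        raise ValueError("Unable to parse email address")
    return address.lower()


def on_email_request(body: dict) -> str:
    try:
        return validate_email_address(body["email"])
    except ValueError as e:
        raise RequestError(400, str(e))


print(on_email_request({"email": "Foo@Example.ORG"}))  # foo@example.org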
# # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/rest/client/v2_alpha/read_marker.py b/synapse/rest/client/v2_alpha/read_marker.py index 55c6688f52..5988fa47e5 100644 --- a/synapse/rest/client/v2_alpha/read_marker.py +++ b/synapse/rest/client/v2_alpha/read_marker.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2017 Vector Creations Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/rest/client/v2_alpha/receipts.py b/synapse/rest/client/v2_alpha/receipts.py index 6f7246a394..8cf4aebdbe 100644 --- a/synapse/rest/client/v2_alpha/receipts.py +++ b/synapse/rest/client/v2_alpha/receipts.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2015, 2016 OpenMarket Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/rest/client/v2_alpha/register.py b/synapse/rest/client/v2_alpha/register.py index beca08ab5d..0be5fbb7f7 100644 --- a/synapse/rest/client/v2_alpha/register.py +++ b/synapse/rest/client/v2_alpha/register.py @@ -1,7 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2015-2016 OpenMarket Ltd -# Copyright 2017-2018 New Vector Ltd -# Copyright 2019 The Matrix.org Foundation C.I.C. +# Copyright 2021 The Matrix.org Foundation C.I.C. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -33,7 +31,7 @@ from synapse.api.errors import ( ) from synapse.config import ConfigError from synapse.config.captcha import CaptchaConfig -from synapse.config.consent_config import ConsentConfig +from synapse.config.consent import ConsentConfig from synapse.config.emailconfig import ThreepidBehaviour from synapse.config.ratelimiting import FederationRateLimitConfig from synapse.config.registration import RegistrationConfig @@ -52,7 +50,11 @@ from synapse.push.mailer import Mailer from synapse.util.msisdn import phone_number_to_msisdn from synapse.util.ratelimitutils import FederationRateLimiter from synapse.util.stringutils import assert_valid_client_secret, random_string -from synapse.util.threepids import canonicalise_email, check_3pid_allowed +from synapse.util.threepids import ( + canonicalise_email, + check_3pid_allowed, + validate_email, +) from ._base import client_patterns, interactive_auth_handler @@ -114,7 +116,7 @@ class EmailRegisterRequestTokenRestServlet(RestServlet): # (See on_POST in EmailThreepidRequestTokenRestServlet # in synapse/rest/client/v2_alpha/account.py) try: - email = canonicalise_email(body["email"]) + email = validate_email(body["email"]) except ValueError as e: raise SynapseError(400, str(e)) send_attempt = body["send_attempt"] diff --git a/synapse/rest/client/v2_alpha/relations.py b/synapse/rest/client/v2_alpha/relations.py index fe765da23c..c7da6759db 100644 --- a/synapse/rest/client/v2_alpha/relations.py +++ b/synapse/rest/client/v2_alpha/relations.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2019 New Vector Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/rest/client/v2_alpha/report_event.py b/synapse/rest/client/v2_alpha/report_event.py index 215d619ca1..2c169abbf3 100644 --- a/synapse/rest/client/v2_alpha/report_event.py +++ b/synapse/rest/client/v2_alpha/report_event.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2016 OpenMarket Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/rest/client/v2_alpha/room_keys.py b/synapse/rest/client/v2_alpha/room_keys.py index 
53de97923f..263596be86 100644 --- a/synapse/rest/client/v2_alpha/room_keys.py +++ b/synapse/rest/client/v2_alpha/room_keys.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2017, 2018 New Vector Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/rest/client/v2_alpha/room_upgrade_rest_servlet.py b/synapse/rest/client/v2_alpha/room_upgrade_rest_servlet.py index 147920767f..6d1b083acb 100644 --- a/synapse/rest/client/v2_alpha/room_upgrade_rest_servlet.py +++ b/synapse/rest/client/v2_alpha/room_upgrade_rest_servlet.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2016 OpenMarket Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/rest/client/v2_alpha/sendtodevice.py b/synapse/rest/client/v2_alpha/sendtodevice.py index 79c1b526ee..f8dcee603c 100644 --- a/synapse/rest/client/v2_alpha/sendtodevice.py +++ b/synapse/rest/client/v2_alpha/sendtodevice.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2016 OpenMarket Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/rest/client/v2_alpha/shared_rooms.py b/synapse/rest/client/v2_alpha/shared_rooms.py index c866d5151c..d2e7f04b40 100644 --- a/synapse/rest/client/v2_alpha/shared_rooms.py +++ b/synapse/rest/client/v2_alpha/shared_rooms.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2020 Half-Shot # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/rest/client/v2_alpha/sync.py b/synapse/rest/client/v2_alpha/sync.py index c01ba14cd2..042e1788b6 100644 --- a/synapse/rest/client/v2_alpha/sync.py +++ b/synapse/rest/client/v2_alpha/sync.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2015, 2016 OpenMarket Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/rest/client/v2_alpha/tags.py b/synapse/rest/client/v2_alpha/tags.py index a97cd66c52..c14f83be18 100644 --- a/synapse/rest/client/v2_alpha/tags.py +++ b/synapse/rest/client/v2_alpha/tags.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2015, 2016 OpenMarket Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/rest/client/v2_alpha/thirdparty.py b/synapse/rest/client/v2_alpha/thirdparty.py index 0c127a1b5f..b5c67c9bb6 100644 --- a/synapse/rest/client/v2_alpha/thirdparty.py +++ b/synapse/rest/client/v2_alpha/thirdparty.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2015, 2016 OpenMarket Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/rest/client/v2_alpha/tokenrefresh.py b/synapse/rest/client/v2_alpha/tokenrefresh.py index 79317c74ba..b2f858545c 100644 --- a/synapse/rest/client/v2_alpha/tokenrefresh.py +++ b/synapse/rest/client/v2_alpha/tokenrefresh.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2015, 2016 OpenMarket Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/rest/client/v2_alpha/user_directory.py b/synapse/rest/client/v2_alpha/user_directory.py index eeddfa31f8..9b091b0913 100644 --- a/synapse/rest/client/v2_alpha/user_directory.py +++ b/synapse/rest/client/v2_alpha/user_directory.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2017 Vector Creations Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/rest/client/versions.py b/synapse/rest/client/versions.py index 54eed3251b..98331b3a46 100644 --- a/synapse/rest/client/versions.py +++ b/synapse/rest/client/versions.py @@ 
-1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2016 OpenMarket Ltd # Copyright 2017 Vector Creations Ltd # Copyright 2018-2019 New Vector Ltd diff --git a/synapse/rest/consent/consent_resource.py b/synapse/rest/consent/consent_resource.py index 8b9ef26cf2..b19cd8afc5 100644 --- a/synapse/rest/consent/consent_resource.py +++ b/synapse/rest/consent/consent_resource.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2018 New Vector Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -33,14 +32,6 @@ TEMPLATE_LANGUAGE = "en" logger = logging.getLogger(__name__) -# use hmac.compare_digest if we have it (python 2.7.7), else just use equality -if hasattr(hmac, "compare_digest"): - compare_digest = hmac.compare_digest -else: - - def compare_digest(a, b): - return a == b - class ConsentResource(DirectServeHtmlResource): """A twisted Resource to display a privacy policy and gather consent to it @@ -210,5 +201,5 @@ class ConsentResource(DirectServeHtmlResource): .encode("ascii") ) - if not compare_digest(want_mac, userhmac): + if not hmac.compare_digest(want_mac, userhmac): raise SynapseError(HTTPStatus.FORBIDDEN, "HMAC incorrect") diff --git a/synapse/rest/health.py b/synapse/rest/health.py index 0170950bf3..4487b54abf 100644 --- a/synapse/rest/health.py +++ b/synapse/rest/health.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2020 The Matrix.org Foundation C.I.C. # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/rest/key/__init__.py b/synapse/rest/key/__init__.py index fe0ac3f8e9..629e2df74a 100644 --- a/synapse/rest/key/__init__.py +++ b/synapse/rest/key/__init__.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2015, 2016 OpenMarket Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/rest/key/v2/__init__.py b/synapse/rest/key/v2/__init__.py index cb5abcf826..c6c63073ea 100644 --- a/synapse/rest/key/v2/__init__.py +++ b/synapse/rest/key/v2/__init__.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2015, 2016 OpenMarket Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/rest/key/v2/local_key_resource.py b/synapse/rest/key/v2/local_key_resource.py index d8e8e48c1c..e8dbe240d8 100644 --- a/synapse/rest/key/v2/local_key_resource.py +++ b/synapse/rest/key/v2/local_key_resource.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2014-2016 OpenMarket Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/rest/key/v2/remote_key_resource.py b/synapse/rest/key/v2/remote_key_resource.py index c57ac22e58..f648678b09 100644 --- a/synapse/rest/key/v2/remote_key_resource.py +++ b/synapse/rest/key/v2/remote_key_resource.py @@ -144,7 +144,7 @@ class RemoteKey(DirectServeJsonResource): # Note that the value is unused. cache_misses = {} # type: Dict[str, Dict[str, int]] - for (server_name, key_id, from_server), results in cached.items(): + for (server_name, key_id, _), results in cached.items(): results = [(result["ts_added_ms"], result) for result in results] if not results and key_id is not None: @@ -206,7 +206,7 @@ class RemoteKey(DirectServeJsonResource): # Cast to bytes since postgresql returns a memoryview. json_results.add(bytes(most_recent_result["key_json"])) else: - for ts_added, result in results: + for _, result in results: # Cast to bytes since postgresql returns a memoryview. 
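# Editor's aside -- an illustrative sketch, not part of the patch above. With the
# Python 2 fallback deleted, the consent resource always relies on the standard
# library's hmac.compare_digest, which compares MACs in constant time. A minimal,
# self-contained example of that check (the secret and user ID are made up):
import hmac
from hashlib import sha256

form_secret = b"not-a-real-secret"
user_id = "@alice:example.org"

want_mac = hmac.new(form_secret, user_id.encode("utf8"), sha256).hexdigest().encode("ascii")
presented_mac = hmac.new(form_secret, user_id.encode("utf8"), sha256).hexdigest().encode("ascii")

if not hmac.compare_digest(want_mac, presented_mac):
    raise PermissionError("HMAC incorrect")
print("MAC accepted")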
json_results.add(bytes(result["key_json"])) diff --git a/synapse/rest/media/v1/__init__.py b/synapse/rest/media/v1/__init__.py index 3b8c96e267..d20186bbd0 100644 --- a/synapse/rest/media/v1/__init__.py +++ b/synapse/rest/media/v1/__init__.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2014-2016 OpenMarket Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/rest/media/v1/_base.py b/synapse/rest/media/v1/_base.py index 6366947071..0fb4cd81f1 100644 --- a/synapse/rest/media/v1/_base.py +++ b/synapse/rest/media/v1/_base.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2014-2016 OpenMarket Ltd # Copyright 2019-2021 The Matrix.org Foundation C.I.C. # diff --git a/synapse/rest/media/v1/config_resource.py b/synapse/rest/media/v1/config_resource.py index c41a7ab412..a1d36e5cf1 100644 --- a/synapse/rest/media/v1/config_resource.py +++ b/synapse/rest/media/v1/config_resource.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2018 Will Hunt <will@half-shot.uk> # Copyright 2020-2021 The Matrix.org Foundation C.I.C. # @@ -31,7 +30,7 @@ class MediaConfigResource(DirectServeJsonResource): def __init__(self, hs: "HomeServer"): super().__init__() - config = hs.get_config() + config = hs.config self.clock = hs.get_clock() self.auth = hs.get_auth() self.limits_dict = {"m.upload.size": config.max_upload_size} diff --git a/synapse/rest/media/v1/download_resource.py b/synapse/rest/media/v1/download_resource.py index 5dadaeaf57..cd2468f9c5 100644 --- a/synapse/rest/media/v1/download_resource.py +++ b/synapse/rest/media/v1/download_resource.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2014-2016 OpenMarket Ltd # Copyright 2020-2021 The Matrix.org Foundation C.I.C. # diff --git a/synapse/rest/media/v1/filepath.py b/synapse/rest/media/v1/filepath.py index 7792f26e78..09531ebf54 100644 --- a/synapse/rest/media/v1/filepath.py +++ b/synapse/rest/media/v1/filepath.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2014-2016 OpenMarket Ltd # Copyright 2020-2021 The Matrix.org Foundation C.I.C. # @@ -22,7 +21,7 @@ from typing import Callable, List NEW_FORMAT_ID_RE = re.compile(r"^\d\d\d\d-\d\d-\d\d") -def _wrap_in_base_path(func: "Callable[..., str]") -> "Callable[..., str]": +def _wrap_in_base_path(func: Callable[..., str]) -> Callable[..., str]: """Takes a function that returns a relative path and turns it into an absolute path based on the location of the primary media store """ diff --git a/synapse/rest/media/v1/media_repository.py b/synapse/rest/media/v1/media_repository.py index 0c041b542d..e8a875b900 100644 --- a/synapse/rest/media/v1/media_repository.py +++ b/synapse/rest/media/v1/media_repository.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2014-2016 OpenMarket Ltd # Copyright 2018-2021 The Matrix.org Foundation C.I.C. # @@ -468,6 +467,9 @@ class MediaRepository: return media_info def _get_thumbnail_requirements(self, media_type): + scpos = media_type.find(";") + if scpos > 0: + media_type = media_type[:scpos] return self.thumbnail_requirements.get(media_type, ()) def _generate_thumbnail( diff --git a/synapse/rest/media/v1/media_storage.py b/synapse/rest/media/v1/media_storage.py index b1b1c9e6ec..c7fd97c46c 100644 --- a/synapse/rest/media/v1/media_storage.py +++ b/synapse/rest/media/v1/media_storage.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2018-2021 The Matrix.org Foundation C.I.C. 
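# Editor's aside -- an illustrative sketch, not part of the patch above. The change to
# _get_thumbnail_requirements truncates the stored media type at the first ";", so a
# non-standard value such as "image/webp;charset=utf-8" still matches requirements
# keyed on the bare MIME type. A stand-alone version of that lookup (the requirements
# table below is a hypothetical example):
from typing import Dict, Tuple


def bare_media_type(media_type: str) -> str:
    scpos = media_type.find(";")
    if scpos > 0:
        media_type = media_type[:scpos]
    return media_type


requirements: Dict[str, Tuple[str, ...]] = {"image/webp": ("32x32 crop", "640x480 scale")}
for content_type in ("image/webp", "image/webp;charset=utf-8", "application/pdf"):
    print(content_type, "->", requirements.get(bare_media_type(content_type), ()))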
# # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/rest/media/v1/preview_url_resource.py b/synapse/rest/media/v1/preview_url_resource.py index 814145a04a..0adfb1a70f 100644 --- a/synapse/rest/media/v1/preview_url_resource.py +++ b/synapse/rest/media/v1/preview_url_resource.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2016 OpenMarket Ltd # Copyright 2020-2021 The Matrix.org Foundation C.I.C. # diff --git a/synapse/rest/media/v1/storage_provider.py b/synapse/rest/media/v1/storage_provider.py index 031947557d..0ff6ad3c0c 100644 --- a/synapse/rest/media/v1/storage_provider.py +++ b/synapse/rest/media/v1/storage_provider.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2018-2021 The Matrix.org Foundation C.I.C. # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/rest/media/v1/thumbnail_resource.py b/synapse/rest/media/v1/thumbnail_resource.py index af802bc0b1..a029d426f0 100644 --- a/synapse/rest/media/v1/thumbnail_resource.py +++ b/synapse/rest/media/v1/thumbnail_resource.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2014-2016 OpenMarket Ltd # Copyright 2020-2021 The Matrix.org Foundation C.I.C. # diff --git a/synapse/rest/media/v1/thumbnailer.py b/synapse/rest/media/v1/thumbnailer.py index 988f52c78f..37fe582390 100644 --- a/synapse/rest/media/v1/thumbnailer.py +++ b/synapse/rest/media/v1/thumbnailer.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2014-2016 OpenMarket Ltd # Copyright 2020-2021 The Matrix.org Foundation C.I.C. # diff --git a/synapse/rest/media/v1/upload_resource.py b/synapse/rest/media/v1/upload_resource.py index 0138b2e2d1..024a105bf2 100644 --- a/synapse/rest/media/v1/upload_resource.py +++ b/synapse/rest/media/v1/upload_resource.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2014-2016 OpenMarket Ltd # Copyright 2020-2021 The Matrix.org Foundation C.I.C. # @@ -52,8 +51,6 @@ class UploadResource(DirectServeJsonResource): async def _async_render_POST(self, request: SynapseRequest) -> None: requester = await self.auth.get_user_by_req(request) - # TODO: The checks here are a bit late. The content will have - # already been uploaded to a tmp file at this point content_length = request.getHeader("Content-Length") if content_length is None: raise SynapseError(msg="Request must specify a Content-Length", code=400) diff --git a/synapse/rest/synapse/__init__.py b/synapse/rest/synapse/__init__.py index c0b733488b..6ef4fbe8f7 100644 --- a/synapse/rest/synapse/__init__.py +++ b/synapse/rest/synapse/__init__.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2020 The Matrix.org Foundation C.I.C. # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/rest/synapse/client/__init__.py b/synapse/rest/synapse/client/__init__.py index 9eeb970580..47a2f72b32 100644 --- a/synapse/rest/synapse/client/__init__.py +++ b/synapse/rest/synapse/client/__init__.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2021 The Matrix.org Foundation C.I.C. # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/rest/synapse/client/new_user_consent.py b/synapse/rest/synapse/client/new_user_consent.py index 78ee0b5e88..488b97b32e 100644 --- a/synapse/rest/synapse/client/new_user_consent.py +++ b/synapse/rest/synapse/client/new_user_consent.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2021 The Matrix.org Foundation C.I.C. 
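# Editor's aside -- an illustrative sketch, not part of the patch above. The upload
# endpoint rejects requests without a Content-Length header up front; checking the
# declared length against a configured maximum before reading the body is shown here
# as an assumed extension of that hunk (the header dict, limit and error type are
# hypothetical stand-ins).
MAX_UPLOAD_SIZE = 50 * 1024 * 1024  # assumed limit, analogous to max_upload_size


class UploadError(Exception):
    def __init__(self, code: int, msg: str) -> None:
        super().__init__(msg)
        self.code = code


def check_upload_headers(headers: dict) -> int:
    content_length = headers.get("Content-Length")
    if content_length is None:
        raise UploadError(400, "Request must specify a Content-Length")
    length = int(content_length)
    if length > MAX_UPLOAD_SIZE:
        raise UploadError(413, "Upload request body is too large")
    return length


print(check_upload_headers({"Content-Length": "1024"}))  # 1024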
# # Licensed under the Apache License, Version 2.0 (the "License"); @@ -62,6 +61,15 @@ class NewUserConsentResource(DirectServeHtmlResource): self._sso_handler.render_error(request, "bad_session", e.msg, code=e.code) return + # It should be impossible to get here without having first been through + # the pick-a-username step, which ensures chosen_localpart gets set. + if not session.chosen_localpart: + logger.warning("Session has no user name selected") + self._sso_handler.render_error( + request, "no_user", "No user name has been selected.", code=400 + ) + return + user_id = UserID(session.chosen_localpart, self._server_name) user_profile = { "display_name": session.display_name, diff --git a/synapse/rest/synapse/client/oidc/__init__.py b/synapse/rest/synapse/client/oidc/__init__.py index 64c0deb75d..36ba401656 100644 --- a/synapse/rest/synapse/client/oidc/__init__.py +++ b/synapse/rest/synapse/client/oidc/__init__.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2020 Quentin Gliech # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/rest/synapse/client/oidc/callback_resource.py b/synapse/rest/synapse/client/oidc/callback_resource.py index 1af33f0a45..7785f17e90 100644 --- a/synapse/rest/synapse/client/oidc/callback_resource.py +++ b/synapse/rest/synapse/client/oidc/callback_resource.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2020 Quentin Gliech # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/rest/synapse/client/password_reset.py b/synapse/rest/synapse/client/password_reset.py index d26ce46efc..f2800bf2db 100644 --- a/synapse/rest/synapse/client/password_reset.py +++ b/synapse/rest/synapse/client/password_reset.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2020 The Matrix.org Foundation C.I.C. # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/rest/synapse/client/pick_idp.py b/synapse/rest/synapse/client/pick_idp.py index 9550b82998..d3a94a9349 100644 --- a/synapse/rest/synapse/client/pick_idp.py +++ b/synapse/rest/synapse/client/pick_idp.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2021 The Matrix.org Foundation C.I.C. # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/rest/synapse/client/pick_username.py b/synapse/rest/synapse/client/pick_username.py index d9ffe84489..9b002cc15e 100644 --- a/synapse/rest/synapse/client/pick_username.py +++ b/synapse/rest/synapse/client/pick_username.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2020 The Matrix.org Foundation C.I.C. 
# # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/rest/synapse/client/saml2/__init__.py b/synapse/rest/synapse/client/saml2/__init__.py index 3e8235ee1e..781ccb237c 100644 --- a/synapse/rest/synapse/client/saml2/__init__.py +++ b/synapse/rest/synapse/client/saml2/__init__.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2018 New Vector Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/rest/synapse/client/saml2/metadata_resource.py b/synapse/rest/synapse/client/saml2/metadata_resource.py index 1e8526e22e..b37c7083dc 100644 --- a/synapse/rest/synapse/client/saml2/metadata_resource.py +++ b/synapse/rest/synapse/client/saml2/metadata_resource.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2018 New Vector Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/rest/synapse/client/saml2/response_resource.py b/synapse/rest/synapse/client/saml2/response_resource.py index 4dfadf1bfb..774ccd870f 100644 --- a/synapse/rest/synapse/client/saml2/response_resource.py +++ b/synapse/rest/synapse/client/saml2/response_resource.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # # Copyright 2018 New Vector Ltd # diff --git a/synapse/rest/synapse/client/sso_register.py b/synapse/rest/synapse/client/sso_register.py index f2acce2437..70cd148a76 100644 --- a/synapse/rest/synapse/client/sso_register.py +++ b/synapse/rest/synapse/client/sso_register.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2021 The Matrix.org Foundation C.I.C. # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/rest/well_known.py b/synapse/rest/well_known.py index f591cc6c5c..19ac3af337 100644 --- a/synapse/rest/well_known.py +++ b/synapse/rest/well_known.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2018 New Vector Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/secrets.py b/synapse/secrets.py deleted file mode 100644 index 7939db75e7..0000000000 --- a/synapse/secrets.py +++ /dev/null @@ -1,45 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2018 New Vector Ltd -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -""" -Injectable secrets module for Synapse. - -See https://docs.python.org/3/library/secrets.html#module-secrets for the API -used in Python 3.6, and the API emulated in Python 2.7. 
-""" -import sys - -# secrets is available since python 3.6 -if sys.version_info[0:2] >= (3, 6): - import secrets - - class Secrets: - def token_bytes(self, nbytes: int = 32) -> bytes: - return secrets.token_bytes(nbytes) - - def token_hex(self, nbytes: int = 32) -> str: - return secrets.token_hex(nbytes) - - -else: - import binascii - import os - - class Secrets: - def token_bytes(self, nbytes: int = 32) -> bytes: - return os.urandom(nbytes) - - def token_hex(self, nbytes: int = 32) -> str: - return binascii.hexlify(self.token_bytes(nbytes)).decode("ascii") diff --git a/synapse/server.py b/synapse/server.py index cfb55c230d..2337d2d9b4 100644 --- a/synapse/server.py +++ b/synapse/server.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2014-2016 OpenMarket Ltd # Copyright 2017-2018 New Vector Ltd # Copyright 2019 The Matrix.org Foundation C.I.C. @@ -71,13 +70,14 @@ from synapse.handlers.acme import AcmeHandler from synapse.handlers.admin import AdminHandler from synapse.handlers.appservice import ApplicationServicesHandler from synapse.handlers.auth import AuthHandler, MacaroonGenerator -from synapse.handlers.cas_handler import CasHandler +from synapse.handlers.cas import CasHandler from synapse.handlers.deactivate_account import DeactivateAccountHandler from synapse.handlers.device import DeviceHandler, DeviceWorkerHandler from synapse.handlers.devicemessage import DeviceMessageHandler from synapse.handlers.directory import DirectoryHandler from synapse.handlers.e2e_keys import E2eKeysHandler from synapse.handlers.e2e_room_keys import E2eRoomKeysHandler +from synapse.handlers.event_auth import EventAuthHandler from synapse.handlers.events import EventHandler, EventStreamHandler from synapse.handlers.federation import FederationHandler from synapse.handlers.groups_local import GroupsLocalHandler, GroupsLocalWorkerHandler @@ -86,7 +86,11 @@ from synapse.handlers.initial_sync import InitialSyncHandler from synapse.handlers.message import EventCreationHandler, MessageHandler from synapse.handlers.pagination import PaginationHandler from synapse.handlers.password_policy import PasswordPolicyHandler -from synapse.handlers.presence import PresenceHandler +from synapse.handlers.presence import ( + BasePresenceHandler, + PresenceHandler, + WorkerPresenceHandler, +) from synapse.handlers.profile import ProfileHandler from synapse.handlers.read_marker import ReadMarkerHandler from synapse.handlers.receipts import ReceiptsHandler @@ -122,7 +126,6 @@ from synapse.rest.media.v1.media_repository import ( MediaRepository, MediaRepositoryResource, ) -from synapse.secrets import Secrets from synapse.server_notices.server_notices_manager import ServerNoticesManager from synapse.server_notices.server_notices_sender import ServerNoticesSender from synapse.server_notices.worker_server_notices_sender import ( @@ -142,8 +145,8 @@ logger = logging.getLogger(__name__) if TYPE_CHECKING: from txredisapi import RedisProtocol - from synapse.handlers.oidc_handler import OidcHandler - from synapse.handlers.saml_handler import SamlHandler + from synapse.handlers.oidc import OidcHandler + from synapse.handlers.saml import SamlHandler T = TypeVar("T", bound=Callable[..., Any]) @@ -283,6 +286,14 @@ class HomeServer(metaclass=abc.ABCMeta): if self.config.run_background_tasks: self.setup_background_tasks() + def start_listening(self) -> None: + """Start the HTTP, manhole, metrics, etc listeners + + Does nothing in this base class; overridden in derived classes to start the + appropriate listeners. 
+ """ + pass + def setup_background_tasks(self) -> None: """ Some handlers have side effects on instantiation (like registering @@ -320,9 +331,6 @@ class HomeServer(metaclass=abc.ABCMeta): return self.datastores - def get_config(self) -> HomeServerConfig: - return self.config - @cache_in_self def get_distributor(self) -> Distributor: return Distributor() @@ -416,8 +424,11 @@ class HomeServer(metaclass=abc.ABCMeta): return StateResolutionHandler(self) @cache_in_self - def get_presence_handler(self) -> PresenceHandler: - return PresenceHandler(self) + def get_presence_handler(self) -> BasePresenceHandler: + if self.get_instance_name() in self.config.worker.writers.presence: + return PresenceHandler(self) + else: + return WorkerPresenceHandler(self) @cache_in_self def get_typing_writer_handler(self) -> TypingWriterHandler: @@ -630,10 +641,6 @@ class HomeServer(metaclass=abc.ABCMeta): return GroupAttestionRenewer(self) @cache_in_self - def get_secrets(self) -> Secrets: - return Secrets() - - @cache_in_self def get_stats_handler(self) -> StatsHandler: return StatsHandler(self) @@ -693,13 +700,13 @@ class HomeServer(metaclass=abc.ABCMeta): @cache_in_self def get_saml_handler(self) -> "SamlHandler": - from synapse.handlers.saml_handler import SamlHandler + from synapse.handlers.saml import SamlHandler return SamlHandler(self) @cache_in_self def get_oidc_handler(self) -> "OidcHandler": - from synapse.handlers.oidc_handler import OidcHandler + from synapse.handlers.oidc import OidcHandler return OidcHandler(self) @@ -744,6 +751,10 @@ class HomeServer(metaclass=abc.ABCMeta): return SpaceSummaryHandler(self) @cache_in_self + def get_event_auth_handler(self) -> EventAuthHandler: + return EventAuthHandler(self) + + @cache_in_self def get_external_cache(self) -> ExternalCache: return ExternalCache(self) diff --git a/synapse/server_notices/consent_server_notices.py b/synapse/server_notices/consent_server_notices.py index a9349bf9a1..e65f6f88fe 100644 --- a/synapse/server_notices/consent_server_notices.py +++ b/synapse/server_notices/consent_server_notices.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2018 New Vector Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/server_notices/resource_limits_server_notices.py b/synapse/server_notices/resource_limits_server_notices.py index a18a2e76c9..e4b0bc5c72 100644 --- a/synapse/server_notices/resource_limits_server_notices.py +++ b/synapse/server_notices/resource_limits_server_notices.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2018 New Vector Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/server_notices/server_notices_manager.py b/synapse/server_notices/server_notices_manager.py index 144e1da78e..f19075b760 100644 --- a/synapse/server_notices/server_notices_manager.py +++ b/synapse/server_notices/server_notices_manager.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2018 New Vector Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/server_notices/server_notices_sender.py b/synapse/server_notices/server_notices_sender.py index 965c645889..c875b15b32 100644 --- a/synapse/server_notices/server_notices_sender.py +++ b/synapse/server_notices/server_notices_sender.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2018 New Vector Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/server_notices/worker_server_notices_sender.py 
b/synapse/server_notices/worker_server_notices_sender.py index c76bd57460..cc53318491 100644 --- a/synapse/server_notices/worker_server_notices_sender.py +++ b/synapse/server_notices/worker_server_notices_sender.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2018 New Vector Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/spam_checker_api/__init__.py b/synapse/spam_checker_api/__init__.py index 3ce25bb012..73018f2d00 100644 --- a/synapse/spam_checker_api/__init__.py +++ b/synapse/spam_checker_api/__init__.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2019 The Matrix.org Foundation C.I.C. # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/state/__init__.py b/synapse/state/__init__.py index c0f79ffdc8..b3bd92d37c 100644 --- a/synapse/state/__init__.py +++ b/synapse/state/__init__.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2014-2016 OpenMarket Ltd # Copyright 2018 New Vector Ltd # @@ -20,6 +19,7 @@ from typing import ( Any, Awaitable, Callable, + Collection, DefaultDict, Dict, FrozenSet, @@ -47,7 +47,7 @@ from synapse.logging.utils import log_function from synapse.state import v1, v2 from synapse.storage.databases.main.events_worker import EventRedactBehaviour from synapse.storage.roommember import ProfileInfo -from synapse.types import Collection, StateMap +from synapse.types import StateMap from synapse.util.async_helpers import Linearizer from synapse.util.caches.expiringcache import ExpiringCache from synapse.util.metrics import Measure, measure_func diff --git a/synapse/state/v1.py b/synapse/state/v1.py index ce255da6fd..318e998813 100644 --- a/synapse/state/v1.py +++ b/synapse/state/v1.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2018 New Vector Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/state/v2.py b/synapse/state/v2.py index e73a548ee4..008644cd98 100644 --- a/synapse/state/v2.py +++ b/synapse/state/v2.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2018 New Vector Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -19,6 +18,7 @@ import logging from typing import ( Any, Callable, + Collection, Dict, Generator, Iterable, @@ -38,7 +38,7 @@ from synapse.api.constants import EventTypes from synapse.api.errors import AuthError from synapse.api.room_versions import KNOWN_ROOM_VERSIONS from synapse.events import EventBase -from synapse.types import Collection, MutableStateMap, StateMap +from synapse.types import MutableStateMap, StateMap from synapse.util import Clock logger = logging.getLogger(__name__) diff --git a/synapse/storage/__init__.py b/synapse/storage/__init__.py index 0b9007e51f..105e4e1fec 100644 --- a/synapse/storage/__init__.py +++ b/synapse/storage/__init__.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2014-2016 OpenMarket Ltd # Copyright 2018,2019 New Vector Ltd # diff --git a/synapse/storage/_base.py b/synapse/storage/_base.py index 240905329f..6b68d8720c 100644 --- a/synapse/storage/_base.py +++ b/synapse/storage/_base.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2014-2016 OpenMarket Ltd # Copyright 2017-2018 New Vector Ltd # Copyright 2019 The Matrix.org Foundation C.I.C. 
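The `synapse/server.py` hunk further up changes `get_presence_handler` to return a `BasePresenceHandler` whose concrete type depends on whether the current instance is listed under `config.worker.writers.presence`. A minimal, self-contained sketch of that dispatch follows; the instance names are invented, and the note that `WorkerPresenceHandler` forwards writes to the writer is an assumption based on the "presence on a worker" feature in the changelog, not something this hunk spells out:

    from typing import Sequence

    def choose_presence_handler(instance_name: str, presence_writers: Sequence[str]) -> str:
        # Mirrors HomeServer.get_presence_handler(): only an instance named as a
        # presence writer runs the full PresenceHandler; every other instance is
        # assumed to get the lighter WorkerPresenceHandler instead.
        if instance_name in presence_writers:
            return "PresenceHandler"
        return "WorkerPresenceHandler"

    assert choose_presence_handler("master", ["master"]) == "PresenceHandler"
    assert choose_presence_handler("presence_reader1", ["master"]) == "WorkerPresenceHandler"
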
@@ -17,13 +16,13 @@ import logging import random from abc import ABCMeta -from typing import TYPE_CHECKING, Any, Iterable, Optional, Union +from typing import TYPE_CHECKING, Any, Collection, Iterable, Optional, Union from synapse.storage.database import LoggingTransaction # noqa: F401 from synapse.storage.database import make_in_list_sql_clause # noqa: F401 from synapse.storage.database import DatabasePool from synapse.storage.types import Connection -from synapse.types import Collection, StreamToken, get_domain_from_id +from synapse.types import StreamToken, get_domain_from_id from synapse.util import json_decoder if TYPE_CHECKING: @@ -115,7 +114,7 @@ def db_to_json(db_content: Union[memoryview, bytes, bytearray, str]) -> Any: db_content = db_content.tobytes() # Decode it to a Unicode string before feeding it to the JSON decoder, since - # Python 3.5 does not support deserializing bytes. + # it only supports handling strings if isinstance(db_content, (bytes, bytearray)): db_content = db_content.decode("utf8") diff --git a/synapse/storage/background_updates.py b/synapse/storage/background_updates.py index ccb06aab39..142787fdfd 100644 --- a/synapse/storage/background_updates.py +++ b/synapse/storage/background_updates.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2014-2016 OpenMarket Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/storage/database.py b/synapse/storage/database.py index 77ef29ec71..bd39c095af 100644 --- a/synapse/storage/database.py +++ b/synapse/storage/database.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2014-2016 OpenMarket Ltd # Copyright 2017-2018 New Vector Ltd # Copyright 2019 The Matrix.org Foundation C.I.C. @@ -21,6 +20,7 @@ from time import monotonic as monotonic_time from typing import ( Any, Callable, + Collection, Dict, Iterable, Iterator, @@ -49,7 +49,6 @@ from synapse.metrics.background_process_metrics import run_as_background_process from synapse.storage.background_updates import BackgroundUpdater from synapse.storage.engines import BaseDatabaseEngine, PostgresEngine, Sqlite3Engine from synapse.storage.types import Connection, Cursor -from synapse.types import Collection # python 3 does not have a maximum int value MAX_TXN_ID = 2 ** 63 - 1 @@ -172,10 +171,7 @@ class LoggingDatabaseConnection: # The type of entry which goes on our after_callbacks and exception_callbacks lists. -# -# Python 3.5.2 doesn't support Callable with an ellipsis, so we wrap it in quotes so -# that mypy sees the type but the runtime python doesn't. -_CallbackListEntry = Tuple["Callable[..., None]", Iterable[Any], Dict[str, Any]] +_CallbackListEntry = Tuple[Callable[..., None], Iterable[Any], Dict[str, Any]] R = TypeVar("R") @@ -222,7 +218,7 @@ class LoggingTransaction: self.after_callbacks = after_callbacks self.exception_callbacks = exception_callbacks - def call_after(self, callback: "Callable[..., None]", *args: Any, **kwargs: Any): + def call_after(self, callback: Callable[..., None], *args: Any, **kwargs: Any): """Call the given callback on the main twisted thread after the transaction has finished. Used to invalidate the caches on the correct thread. 
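The `call_after` signature tidied up just above queues work to run only once the transaction has finished, using the `_CallbackListEntry` tuples defined earlier in this file's diff. A self-contained sketch of that queue-then-run shape (the real `LoggingTransaction` only fires these after a successful commit; the `print` callback here is purely illustrative):

    from typing import Any, Callable, Dict, Iterable, List, Tuple

    # Same shape as the _CallbackListEntry alias above.
    _CallbackListEntry = Tuple[Callable[..., None], Iterable[Any], Dict[str, Any]]

    after_callbacks: List[_CallbackListEntry] = []

    def call_after(callback: Callable[..., None], *args: Any, **kwargs: Any) -> None:
        # Queue the callback; nothing runs until the transaction has completed.
        after_callbacks.append((callback, args, kwargs))

    call_after(print, "invalidate cache for", "@alice:example.com")
    for callback, args, kwargs in after_callbacks:
        callback(*args, **kwargs)
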
@@ -234,7 +230,7 @@ class LoggingTransaction: self.after_callbacks.append((callback, args, kwargs)) def call_on_exception( - self, callback: "Callable[..., None]", *args: Any, **kwargs: Any + self, callback: Callable[..., None], *args: Any, **kwargs: Any ): # if self.exception_callbacks is None, that means that whatever constructed the # LoggingTransaction isn't expecting there to be any callbacks; assert that @@ -486,7 +482,7 @@ class DatabasePool: desc: str, after_callbacks: List[_CallbackListEntry], exception_callbacks: List[_CallbackListEntry], - func: "Callable[..., R]", + func: Callable[..., R], *args: Any, **kwargs: Any, ) -> R: @@ -619,7 +615,7 @@ class DatabasePool: async def runInteraction( self, desc: str, - func: "Callable[..., R]", + func: Callable[..., R], *args: Any, db_autocommit: bool = False, **kwargs: Any, @@ -679,7 +675,7 @@ class DatabasePool: async def runWithConnection( self, - func: "Callable[..., R]", + func: Callable[..., R], *args: Any, db_autocommit: bool = False, **kwargs: Any, diff --git a/synapse/storage/databases/__init__.py b/synapse/storage/databases/__init__.py index 379c78bb83..20b755056b 100644 --- a/synapse/storage/databases/__init__.py +++ b/synapse/storage/databases/__init__.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2019 The Matrix.org Foundation C.I.C. # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/storage/databases/main/__init__.py b/synapse/storage/databases/main/__init__.py index b3d16ca7ac..49c7606d51 100644 --- a/synapse/storage/databases/main/__init__.py +++ b/synapse/storage/databases/main/__init__.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2014-2016 OpenMarket Ltd # Copyright 2018 New Vector Ltd # Copyright 2019-2021 The Matrix.org Foundation C.I.C. 
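The `runInteraction` and `runWithConnection` hunks above only unquote the `Callable[..., R]` annotations, but they do document the calling convention: a description string plus a callable that receives a `LoggingTransaction`, whose return value is passed back to the caller. A hedged usage sketch, where the query and function names are invented and `db_pool` stands for a `DatabasePool` instance:

    async def count_users(db_pool) -> int:
        def count_users_txn(txn) -> int:
            # The callable receives a LoggingTransaction; use it like a DB-API cursor.
            txn.execute("SELECT COUNT(*) FROM users")
            (count,) = txn.fetchone()
            return count

        # runInteraction(desc, func, *args, db_autocommit=False, **kwargs)
        return await db_pool.runInteraction("count_users", count_users_txn)
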
@@ -18,7 +17,6 @@ import logging from typing import List, Optional, Tuple -from synapse.api.constants import PresenceState from synapse.config.homeserver import HomeServerConfig from synapse.storage.database import DatabasePool from synapse.storage.databases.main.stats import UserSortOrder @@ -52,7 +50,7 @@ from .media_repository import MediaRepositoryStore from .metrics import ServerMetricsStore from .monthly_active_users import MonthlyActiveUsersStore from .openid import OpenIdStore -from .presence import PresenceStore, UserPresenceState +from .presence import PresenceStore from .profile import ProfileStore from .purge_events import PurgeEventsStore from .push_rule import PushRuleStore @@ -127,9 +125,6 @@ class DataStore( self._clock = hs.get_clock() self.database_engine = database.engine - self._presence_id_gen = StreamIdGenerator( - db_conn, "presence_stream", "stream_id" - ) self._public_room_id_gen = StreamIdGenerator( db_conn, "public_room_list_stream", "stream_id" ) @@ -178,21 +173,6 @@ class DataStore( super().__init__(database, db_conn, hs) - self._presence_on_startup = self._get_active_presence(db_conn) - - presence_cache_prefill, min_presence_val = self.db_pool.get_cache_dict( - db_conn, - "presence_stream", - entity_column="user_id", - stream_column="stream_id", - max_value=self._presence_id_gen.get_current_token(), - ) - self.presence_stream_cache = StreamChangeCache( - "PresenceStreamChangeCache", - min_presence_val, - prefilled_cache=presence_cache_prefill, - ) - device_list_max = self._device_list_id_gen.get_current_token() self._device_list_stream_cache = StreamChangeCache( "DeviceListStreamChangeCache", device_list_max @@ -239,32 +219,6 @@ class DataStore( def get_device_stream_token(self) -> int: return self._device_list_id_gen.get_current_token() - def take_presence_startup_info(self): - active_on_startup = self._presence_on_startup - self._presence_on_startup = None - return active_on_startup - - def _get_active_presence(self, db_conn): - """Fetch non-offline presence from the database so that we can register - the appropriate time outs. - """ - - sql = ( - "SELECT user_id, state, last_active_ts, last_federation_update_ts," - " last_user_sync_ts, status_msg, currently_active FROM presence_stream" - " WHERE state != ?" - ) - - txn = db_conn.cursor() - txn.execute(sql, (PresenceState.OFFLINE,)) - rows = self.db_pool.cursor_to_dict(txn) - txn.close() - - for row in rows: - row["currently_active"] = bool(row["currently_active"]) - - return [UserPresenceState(**row) for row in rows] - async def get_users(self) -> List[JsonDict]: """Function to retrieve a list of users in users table. 
diff --git a/synapse/storage/databases/main/account_data.py b/synapse/storage/databases/main/account_data.py index a277a1ef13..1d02795f43 100644 --- a/synapse/storage/databases/main/account_data.py +++ b/synapse/storage/databases/main/account_data.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2014-2016 OpenMarket Ltd # Copyright 2018 New Vector Ltd # diff --git a/synapse/storage/databases/main/appservice.py b/synapse/storage/databases/main/appservice.py index 85bb853d33..9f182c2a89 100644 --- a/synapse/storage/databases/main/appservice.py +++ b/synapse/storage/databases/main/appservice.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2015, 2016 OpenMarket Ltd # Copyright 2018 New Vector Ltd # diff --git a/synapse/storage/databases/main/cache.py b/synapse/storage/databases/main/cache.py index 1e7637a6f5..ecc1f935e2 100644 --- a/synapse/storage/databases/main/cache.py +++ b/synapse/storage/databases/main/cache.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2019 The Matrix.org Foundation C.I.C. # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/storage/databases/main/censor_events.py b/synapse/storage/databases/main/censor_events.py index 3e26d5ba87..f22c1f241b 100644 --- a/synapse/storage/databases/main/censor_events.py +++ b/synapse/storage/databases/main/censor_events.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2020 The Matrix.org Foundation C.I.C. # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/storage/databases/main/client_ips.py b/synapse/storage/databases/main/client_ips.py index ea3c15fd0e..d60010e942 100644 --- a/synapse/storage/databases/main/client_ips.py +++ b/synapse/storage/databases/main/client_ips.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2016 OpenMarket Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/storage/databases/main/deviceinbox.py b/synapse/storage/databases/main/deviceinbox.py index 691080ce74..7c9d1f744e 100644 --- a/synapse/storage/databases/main/deviceinbox.py +++ b/synapse/storage/databases/main/deviceinbox.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2016 OpenMarket Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/storage/databases/main/devices.py b/synapse/storage/databases/main/devices.py index 9bf8ba888f..c9346de316 100644 --- a/synapse/storage/databases/main/devices.py +++ b/synapse/storage/databases/main/devices.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2016 OpenMarket Ltd # Copyright 2019 New Vector Ltd # Copyright 2019,2020 The Matrix.org Foundation C.I.C. @@ -16,7 +15,7 @@ # limitations under the License. 
import abc import logging -from typing import Any, Dict, Iterable, List, Optional, Set, Tuple +from typing import Any, Collection, Dict, Iterable, List, Optional, Set, Tuple from synapse.api.errors import Codes, StoreError from synapse.logging.opentracing import ( @@ -32,7 +31,7 @@ from synapse.storage.database import ( LoggingTransaction, make_tuple_comparison_clause, ) -from synapse.types import Collection, JsonDict, get_verify_key_from_cross_signing_key +from synapse.types import JsonDict, get_verify_key_from_cross_signing_key from synapse.util import json_decoder, json_encoder from synapse.util.caches.descriptors import cached, cachedList from synapse.util.caches.lrucache import LruCache @@ -718,7 +717,15 @@ class DeviceWorkerStore(SQLBaseStore): keyvalues={"user_id": user_id}, values={}, insertion_values={"added_ts": self._clock.time_msec()}, - desc="make_remote_user_device_cache_as_stale", + desc="mark_remote_user_device_cache_as_stale", + ) + + async def mark_remote_user_device_cache_as_valid(self, user_id: str) -> None: + # Remove the database entry that says we need to resync devices, after a resync + await self.db_pool.simple_delete( + table="device_lists_remote_resync", + keyvalues={"user_id": user_id}, + desc="mark_remote_user_device_cache_as_valid", ) async def mark_remote_user_device_list_as_unsubscribed(self, user_id: str) -> None: @@ -1290,15 +1297,6 @@ class DeviceStore(DeviceWorkerStore, DeviceBackgroundUpdateStore): lock=False, ) - # If we're replacing the remote user's device list cache presumably - # we've done a full resync, so we remove the entry that says we need - # to resync - self.db_pool.simple_delete_txn( - txn, - table="device_lists_remote_resync", - keyvalues={"user_id": user_id}, - ) - async def add_device_change_to_streams( self, user_id: str, device_ids: Collection[str], hosts: List[str] ): diff --git a/synapse/storage/databases/main/directory.py b/synapse/storage/databases/main/directory.py index 267b948397..86075bc55b 100644 --- a/synapse/storage/databases/main/directory.py +++ b/synapse/storage/databases/main/directory.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2014-2016 OpenMarket Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/storage/databases/main/e2e_room_keys.py b/synapse/storage/databases/main/e2e_room_keys.py index 12cecceec2..b15fb71e62 100644 --- a/synapse/storage/databases/main/e2e_room_keys.py +++ b/synapse/storage/databases/main/e2e_room_keys.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2017 New Vector Ltd # Copyright 2019 Matrix.org Foundation C.I.C. # diff --git a/synapse/storage/databases/main/end_to_end_keys.py b/synapse/storage/databases/main/end_to_end_keys.py index f1e7859d26..88afe97c41 100644 --- a/synapse/storage/databases/main/end_to_end_keys.py +++ b/synapse/storage/databases/main/end_to_end_keys.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2015, 2016 OpenMarket Ltd # Copyright 2019 New Vector Ltd # Copyright 2019,2020 The Matrix.org Foundation C.I.C. 
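The `devices.py` changes above split the remote device-list cache bookkeeping into an explicit pair: `mark_remote_user_device_cache_as_stale` records that a resync is needed, and the new `mark_remote_user_device_cache_as_valid` clears that record once the resync has completed, rather than the delete being buried inside the cache-update transaction. A toy model of that marker table, just to show the intended lifecycle, not the real store code:

    class ResyncMarkers:
        """Toy stand-in for the device_lists_remote_resync table."""

        def __init__(self) -> None:
            self._stale = set()

        def mark_stale(self, user_id: str) -> None:
            # simple_upsert in the real store: a resync is needed for this user.
            self._stale.add(user_id)

        def mark_valid(self, user_id: str) -> None:
            # simple_delete in the real store: the resync finished, stop retrying.
            self._stale.discard(user_id)

        def needs_resync(self, user_id: str) -> bool:
            return user_id in self._stale

    markers = ResyncMarkers()
    markers.mark_stale("@bob:remote.example")
    assert markers.needs_resync("@bob:remote.example")
    markers.mark_valid("@bob:remote.example")
    assert not markers.needs_resync("@bob:remote.example")
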
diff --git a/synapse/storage/databases/main/event_federation.py b/synapse/storage/databases/main/event_federation.py index a956be491a..ff81d5cd17 100644 --- a/synapse/storage/databases/main/event_federation.py +++ b/synapse/storage/databases/main/event_federation.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2014-2016 OpenMarket Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -15,7 +14,7 @@ import itertools import logging from queue import Empty, PriorityQueue -from typing import Dict, Iterable, List, Set, Tuple +from typing import Collection, Dict, Iterable, List, Set, Tuple from synapse.api.errors import StoreError from synapse.events import EventBase @@ -26,7 +25,6 @@ from synapse.storage.databases.main.events_worker import EventsWorkerStore from synapse.storage.databases.main.signatures import SignatureWorkerStore from synapse.storage.engines import PostgresEngine from synapse.storage.types import Cursor -from synapse.types import Collection from synapse.util.caches.descriptors import cached from synapse.util.caches.lrucache import LruCache from synapse.util.iterutils import batch_iter diff --git a/synapse/storage/databases/main/event_push_actions.py b/synapse/storage/databases/main/event_push_actions.py index 78245ad5bd..5845322118 100644 --- a/synapse/storage/databases/main/event_push_actions.py +++ b/synapse/storage/databases/main/event_push_actions.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2015 OpenMarket Ltd # Copyright 2018 New Vector Ltd # diff --git a/synapse/storage/databases/main/events.py b/synapse/storage/databases/main/events.py index ad17123915..fd25c8112d 100644 --- a/synapse/storage/databases/main/events.py +++ b/synapse/storage/databases/main/events.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2014-2016 OpenMarket Ltd # Copyright 2018-2019 New Vector Ltd # Copyright 2019 The Matrix.org Foundation C.I.C. @@ -171,7 +170,7 @@ class PersistEventsStore: ) async with stream_ordering_manager as stream_orderings: - for (event, context), stream in zip(events_and_contexts, stream_orderings): + for (event, _), stream in zip(events_and_contexts, stream_orderings): event.internal_metadata.stream_ordering = stream await self.db_pool.runInteraction( @@ -298,7 +297,7 @@ class PersistEventsStore: txn.execute(sql + clause, args) to_recursively_check = [] - for event_id, prev_event_id, metadata, rejected in txn: + for _, prev_event_id, metadata, rejected in txn: if prev_event_id in existing_prevs: continue @@ -1128,7 +1127,7 @@ class PersistEventsStore: def _update_forward_extremities_txn( self, txn, new_forward_extremities, max_stream_order ): - for room_id, new_extrem in new_forward_extremities.items(): + for room_id in new_forward_extremities.keys(): self.db_pool.simple_delete_txn( txn, table="event_forward_extremities", keyvalues={"room_id": room_id} ) @@ -1379,24 +1378,28 @@ class PersistEventsStore: ], ) - for event, _ in events_and_contexts: - if not event.internal_metadata.is_redacted(): - # If we're persisting an unredacted event we go and ensure - # that we mark any redactions that reference this event as - # requiring censoring. - self.db_pool.simple_update_txn( - txn, - table="redactions", - keyvalues={"redacts": event.event_id}, - updatevalues={"have_censored": False}, + # If we're persisting an unredacted event we go and ensure + # that we mark any redactions that reference this event as + # requiring censoring. + sql = "UPDATE redactions SET have_censored = ? WHERE redacts = ?" 
+ txn.execute_batch( + sql, + ( + ( + False, + event.event_id, ) + for event, _ in events_and_contexts + if not event.internal_metadata.is_redacted() + ), + ) state_events_and_contexts = [ ec for ec in events_and_contexts if ec[0].is_state() ] state_values = [] - for event, context in state_events_and_contexts: + for event, _ in state_events_and_contexts: vals = { "event_id": event.event_id, "room_id": event.room_id, @@ -1465,7 +1468,7 @@ class PersistEventsStore: # nothing to do here return - for event, context in events_and_contexts: + for event, _ in events_and_contexts: if event.type == EventTypes.Redaction and event.redacts is not None: # Remove the entries in the event_push_actions table for the # redacted event. @@ -1882,20 +1885,28 @@ class PersistEventsStore: ), ) - for event, _ in events_and_contexts: - user_ids = self.db_pool.simple_select_onecol_txn( - txn, - table="event_push_actions_staging", - keyvalues={"event_id": event.event_id}, - retcol="user_id", - ) + room_to_event_ids = {} # type: Dict[str, List[str]] + for e, _ in events_and_contexts: + room_to_event_ids.setdefault(e.room_id, []).append(e.event_id) - for uid in user_ids: - txn.call_after( - self.store.get_unread_event_push_actions_by_room_for_user.invalidate_many, - (event.room_id, uid), + for room_id, event_ids in room_to_event_ids.items(): + rows = self.db_pool.simple_select_many_txn( + txn, + table="event_push_actions_staging", + column="event_id", + iterable=event_ids, + keyvalues={}, + retcols=("user_id",), ) + user_ids = {row["user_id"] for row in rows} + + for user_id in user_ids: + txn.call_after( + self.store.get_unread_event_push_actions_by_room_for_user.invalidate_many, + (room_id, user_id), + ) + # Now we delete the staging area for *all* events that were being # persisted. txn.execute_batch( diff --git a/synapse/storage/databases/main/events_bg_updates.py b/synapse/storage/databases/main/events_bg_updates.py index 79e7df6ca9..cbe4be1437 100644 --- a/synapse/storage/databases/main/events_bg_updates.py +++ b/synapse/storage/databases/main/events_bg_updates.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2019 The Matrix.org Foundation C.I.C. # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/storage/databases/main/events_forward_extremities.py b/synapse/storage/databases/main/events_forward_extremities.py index b3703ae161..6d2688d711 100644 --- a/synapse/storage/databases/main/events_forward_extremities.py +++ b/synapse/storage/databases/main/events_forward_extremities.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2021 The Matrix.org Foundation C.I.C. 
# # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/storage/databases/main/events_worker.py b/synapse/storage/databases/main/events_worker.py index c00780969f..2c823e09cf 100644 --- a/synapse/storage/databases/main/events_worker.py +++ b/synapse/storage/databases/main/events_worker.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2018 New Vector Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -16,7 +15,16 @@ import logging import threading from collections import namedtuple -from typing import Container, Dict, Iterable, List, Optional, Tuple, overload +from typing import ( + Collection, + Container, + Dict, + Iterable, + List, + Optional, + Tuple, + overload, +) from constantly import NamedConstant, Names from typing_extensions import Literal @@ -46,7 +54,7 @@ from synapse.storage.database import DatabasePool from synapse.storage.engines import PostgresEngine from synapse.storage.util.id_generators import MultiWriterIdGenerator, StreamIdGenerator from synapse.storage.util.sequence import build_sequence_generator -from synapse.types import Collection, JsonDict, get_domain_from_id +from synapse.types import JsonDict, get_domain_from_id from synapse.util.caches.descriptors import cached from synapse.util.caches.lrucache import LruCache from synapse.util.iterutils import batch_iter diff --git a/synapse/storage/databases/main/filtering.py b/synapse/storage/databases/main/filtering.py index d2f5b9a502..bb244a03c0 100644 --- a/synapse/storage/databases/main/filtering.py +++ b/synapse/storage/databases/main/filtering.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2015, 2016 OpenMarket Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/storage/databases/main/group_server.py b/synapse/storage/databases/main/group_server.py index bd7826f4e9..66ad363bfb 100644 --- a/synapse/storage/databases/main/group_server.py +++ b/synapse/storage/databases/main/group_server.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2017 Vector Creations Ltd # Copyright 2018 New Vector Ltd # diff --git a/synapse/storage/databases/main/keys.py b/synapse/storage/databases/main/keys.py index d504323b03..0e86807834 100644 --- a/synapse/storage/databases/main/keys.py +++ b/synapse/storage/databases/main/keys.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2014-2016 OpenMarket Ltd # Copyright 2019 New Vector Ltd. # diff --git a/synapse/storage/databases/main/media_repository.py b/synapse/storage/databases/main/media_repository.py index b7820ac7ff..c584868188 100644 --- a/synapse/storage/databases/main/media_repository.py +++ b/synapse/storage/databases/main/media_repository.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2014-2016 OpenMarket Ltd # Copyright 2020-2021 The Matrix.org Foundation C.I.C. # diff --git a/synapse/storage/databases/main/metrics.py b/synapse/storage/databases/main/metrics.py index 614a418a15..c3f551d377 100644 --- a/synapse/storage/databases/main/metrics.py +++ b/synapse/storage/databases/main/metrics.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2020 The Matrix.org Foundation C.I.C. 
# # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/storage/databases/main/monthly_active_users.py b/synapse/storage/databases/main/monthly_active_users.py index 757da3d55d..fe25638289 100644 --- a/synapse/storage/databases/main/monthly_active_users.py +++ b/synapse/storage/databases/main/monthly_active_users.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2018 New Vector # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/storage/databases/main/presence.py b/synapse/storage/databases/main/presence.py index 0ff693a310..db22fab23e 100644 --- a/synapse/storage/databases/main/presence.py +++ b/synapse/storage/databases/main/presence.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2014-2016 OpenMarket Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -13,16 +12,69 @@ # See the License for the specific language governing permissions and # limitations under the License. -from typing import Dict, List, Tuple +from typing import TYPE_CHECKING, Dict, List, Tuple -from synapse.api.presence import UserPresenceState +from synapse.api.presence import PresenceState, UserPresenceState +from synapse.replication.tcp.streams import PresenceStream from synapse.storage._base import SQLBaseStore, make_in_list_sql_clause +from synapse.storage.database import DatabasePool +from synapse.storage.engines import PostgresEngine +from synapse.storage.types import Connection +from synapse.storage.util.id_generators import MultiWriterIdGenerator, StreamIdGenerator from synapse.util.caches.descriptors import cached, cachedList +from synapse.util.caches.stream_change_cache import StreamChangeCache from synapse.util.iterutils import batch_iter +if TYPE_CHECKING: + from synapse.server import HomeServer + class PresenceStore(SQLBaseStore): + def __init__( + self, + database: DatabasePool, + db_conn: Connection, + hs: "HomeServer", + ): + super().__init__(database, db_conn, hs) + + self._can_persist_presence = ( + hs.get_instance_name() in hs.config.worker.writers.presence + ) + + if isinstance(database.engine, PostgresEngine): + self._presence_id_gen = MultiWriterIdGenerator( + db_conn=db_conn, + db=database, + stream_name="presence_stream", + instance_name=self._instance_name, + tables=[("presence_stream", "instance_name", "stream_id")], + sequence_name="presence_stream_sequence", + writers=hs.config.worker.writers.to_device, + ) + else: + self._presence_id_gen = StreamIdGenerator( + db_conn, "presence_stream", "stream_id" + ) + + self._presence_on_startup = self._get_active_presence(db_conn) + + presence_cache_prefill, min_presence_val = self.db_pool.get_cache_dict( + db_conn, + "presence_stream", + entity_column="user_id", + stream_column="stream_id", + max_value=self._presence_id_gen.get_current_token(), + ) + self.presence_stream_cache = StreamChangeCache( + "PresenceStreamChangeCache", + min_presence_val, + prefilled_cache=presence_cache_prefill, + ) + async def update_presence(self, presence_states): + assert self._can_persist_presence + stream_ordering_manager = self._presence_id_gen.get_next_mult( len(presence_states) ) @@ -58,6 +110,7 @@ class PresenceStore(SQLBaseStore): "last_user_sync_ts": state.last_user_sync_ts, "status_msg": state.status_msg, "currently_active": state.currently_active, + "instance_name": self._instance_name, } for stream_id, state in zip(stream_orderings, presence_states) ], @@ -217,3 +270,37 @@ class PresenceStore(SQLBaseStore): def get_current_presence_token(self): return 
self._presence_id_gen.get_current_token() + + def _get_active_presence(self, db_conn: Connection): + """Fetch non-offline presence from the database so that we can register + the appropriate time outs. + """ + + sql = ( + "SELECT user_id, state, last_active_ts, last_federation_update_ts," + " last_user_sync_ts, status_msg, currently_active FROM presence_stream" + " WHERE state != ?" + ) + + txn = db_conn.cursor() + txn.execute(sql, (PresenceState.OFFLINE,)) + rows = self.db_pool.cursor_to_dict(txn) + txn.close() + + for row in rows: + row["currently_active"] = bool(row["currently_active"]) + + return [UserPresenceState(**row) for row in rows] + + def take_presence_startup_info(self): + active_on_startup = self._presence_on_startup + self._presence_on_startup = None + return active_on_startup + + def process_replication_rows(self, stream_name, instance_name, token, rows): + if stream_name == PresenceStream.NAME: + self._presence_id_gen.advance(instance_name, token) + for row in rows: + self.presence_stream_cache.entity_has_changed(row.user_id, token) + self._get_presence_for_user.invalidate((row.user_id,)) + return super().process_replication_rows(stream_name, instance_name, token, rows) diff --git a/synapse/storage/databases/main/profile.py b/synapse/storage/databases/main/profile.py index 1e65cb8a04..1a34118f7e 100644 --- a/synapse/storage/databases/main/profile.py +++ b/synapse/storage/databases/main/profile.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2014-2016 OpenMarket Ltd # Copyright 2018 New Vector Ltd # diff --git a/synapse/storage/databases/main/purge_events.py b/synapse/storage/databases/main/purge_events.py index 41f4fe7f95..8f83748b5e 100644 --- a/synapse/storage/databases/main/purge_events.py +++ b/synapse/storage/databases/main/purge_events.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2020 The Matrix.org Foundation C.I.C. # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/storage/databases/main/push_rule.py b/synapse/storage/databases/main/push_rule.py index 9e58dc0e6a..db52176337 100644 --- a/synapse/storage/databases/main/push_rule.py +++ b/synapse/storage/databases/main/push_rule.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2014-2016 OpenMarket Ltd # Copyright 2018 New Vector Ltd # diff --git a/synapse/storage/databases/main/pusher.py b/synapse/storage/databases/main/pusher.py index c65558c280..b48fe086d4 100644 --- a/synapse/storage/databases/main/pusher.py +++ b/synapse/storage/databases/main/pusher.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2014-2016 OpenMarket Ltd # Copyright 2018 New Vector Ltd # diff --git a/synapse/storage/databases/main/receipts.py b/synapse/storage/databases/main/receipts.py index 43c852c96c..3647276acb 100644 --- a/synapse/storage/databases/main/receipts.py +++ b/synapse/storage/databases/main/receipts.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2014-2016 OpenMarket Ltd # Copyright 2018 New Vector Ltd # diff --git a/synapse/storage/databases/main/registration.py b/synapse/storage/databases/main/registration.py index c439b58db6..d36b18a0e9 100644 --- a/synapse/storage/databases/main/registration.py +++ b/synapse/storage/databases/main/registration.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2014-2016 OpenMarket Ltd # Copyright 2017-2018 New Vector Ltd # Copyright 2019,2020 The Matrix.org Foundation C.I.C. 
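The new `PresenceStore.process_replication_rows` above is what keeps instances that do not write presence in sync: each row received on the presence stream advances the local ID generator, records the change in the stream-change cache, and invalidates the per-user cached state. A small self-contained model of that bookkeeping, using toy dictionaries rather than the real `StreamChangeCache` and `@cached` machinery:

    from typing import Dict

    class ToyPresenceReplication:
        def __init__(self) -> None:
            self.current_token = 0
            self.changed_at: Dict[str, int] = {}    # stands in for presence_stream_cache
            self.cached_state: Dict[str, str] = {}  # stands in for the cached presence lookup

        def process_row(self, token: int, user_id: str) -> None:
            self.current_token = max(self.current_token, token)  # advance the id generator
            self.changed_at[user_id] = token                      # entity_has_changed(user_id, token)
            self.cached_state.pop(user_id, None)                  # invalidate the cached lookup

    repl = ToyPresenceReplication()
    repl.cached_state["@alice:example.com"] = "online"
    repl.process_row(42, "@alice:example.com")
    assert repl.current_token == 42
    assert "@alice:example.com" not in repl.cached_state
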
@@ -92,11 +91,17 @@ class RegistrationWorkerStore(CacheInvalidationWorkerStore): id_column=None, ) - self._account_validity_enabled = hs.config.account_validity_enabled + self._account_validity_enabled = ( + hs.config.account_validity.account_validity_enabled + ) + self._account_validity_period = None + self._account_validity_startup_job_max_delta = None if self._account_validity_enabled: - self._account_validity_period = hs.config.account_validity_period + self._account_validity_period = ( + hs.config.account_validity.account_validity_period + ) self._account_validity_startup_job_max_delta = ( - hs.config.account_validity_startup_job_max_delta + hs.config.account_validity.account_validity_startup_job_max_delta ) if hs.config.run_background_tasks: diff --git a/synapse/storage/databases/main/rejections.py b/synapse/storage/databases/main/rejections.py index 1e361aaa9a..167318b314 100644 --- a/synapse/storage/databases/main/rejections.py +++ b/synapse/storage/databases/main/rejections.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2014-2016 OpenMarket Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/storage/databases/main/relations.py b/synapse/storage/databases/main/relations.py index 5cd61547f7..2bbf6d6a95 100644 --- a/synapse/storage/databases/main/relations.py +++ b/synapse/storage/databases/main/relations.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2019 New Vector Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/storage/databases/main/room.py b/synapse/storage/databases/main/room.py index a76e9ae2e7..b4e3f052cc 100644 --- a/synapse/storage/databases/main/room.py +++ b/synapse/storage/databases/main/room.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2014-2016 OpenMarket Ltd # Copyright 2019 The Matrix.org Foundation C.I.C. # diff --git a/synapse/storage/databases/main/roommember.py b/synapse/storage/databases/main/roommember.py index a9216ca9ae..2a8532f8c1 100644 --- a/synapse/storage/databases/main/roommember.py +++ b/synapse/storage/databases/main/roommember.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2014-2016 OpenMarket Ltd # Copyright 2018 New Vector Ltd # @@ -14,7 +13,20 @@ # See the License for the specific language governing permissions and # limitations under the License. import logging -from typing import TYPE_CHECKING, Dict, FrozenSet, Iterable, List, Optional, Set, Tuple +from typing import ( + TYPE_CHECKING, + Collection, + Dict, + FrozenSet, + Iterable, + List, + Optional, + Set, + Tuple, + Union, +) + +import attr from synapse.api.constants import EventTypes, Membership from synapse.events import EventBase @@ -34,7 +46,7 @@ from synapse.storage.roommember import ( ProfileInfo, RoomsForUser, ) -from synapse.types import Collection, PersistedEventPosition, get_domain_from_id +from synapse.types import PersistedEventPosition, StateMap, get_domain_from_id from synapse.util.async_helpers import Linearizer from synapse.util.caches import intern_string from synapse.util.caches.descriptors import _CacheContext, cached, cachedList @@ -54,6 +66,10 @@ class RoomMemberWorkerStore(EventsWorkerStore): def __init__(self, database: DatabasePool, db_conn, hs): super().__init__(database, db_conn, hs) + # Used by `_get_joined_hosts` to ensure only one thing mutates the cache + # at a time. Keyed by room_id. + self._joined_host_linearizer = Linearizer("_JoinedHostsCache") + # Is the current_state_events.membership up to date? 
Or is the # background update still running? self._current_state_events_membership_up_to_date = False @@ -174,6 +190,33 @@ class RoomMemberWorkerStore(EventsWorkerStore): txn.execute(sql, (room_id, Membership.JOIN)) return [r[0] for r in txn] + @cached(max_entries=100000, iterable=True) + async def get_users_in_room_with_profiles( + self, room_id: str + ) -> Dict[str, ProfileInfo]: + """Get a mapping from user ID to profile information for all users in a given room. + + Args: + room_id: The ID of the room to retrieve the users of. + + Returns: + A mapping from user ID to ProfileInfo. + """ + + def _get_users_in_room_with_profiles(txn) -> Dict[str, ProfileInfo]: + sql = """ + SELECT user_id, display_name, avatar_url FROM room_memberships + WHERE room_id = ? AND membership = ? + """ + txn.execute(sql, (room_id, Membership.JOIN)) + + return {r[0]: ProfileInfo(display_name=r[1], avatar_url=r[2]) for r in txn} + + return await self.db_pool.runInteraction( + "get_users_in_room_with_profiles", + _get_users_in_room_with_profiles, + ) + @cached(max_entries=100000) async def get_room_summary(self, room_id: str) -> Dict[str, MemberSummary]: """Get the details of a room roughly suitable for use by the room @@ -704,19 +747,82 @@ class RoomMemberWorkerStore(EventsWorkerStore): @cached(num_args=2, max_entries=10000, iterable=True) async def _get_joined_hosts( - self, room_id, state_group, current_state_ids, state_entry - ): - # We don't use `state_group`, its there so that we can cache based - # on it. However, its important that its never None, since two current_state's - # with a state_group of None are likely to be different. + self, + room_id: str, + state_group: int, + current_state_ids: StateMap[str], + state_entry: "_StateCacheEntry", + ) -> FrozenSet[str]: + # We don't use `state_group`, its there so that we can cache based on + # it. However, its important that its never None, since two + # current_state's with a state_group of None are likely to be different. + # + # The `state_group` must match the `state_entry.state_group` (if not None). assert state_group is not None - + assert state_entry.state_group is None or state_entry.state_group == state_group + + # We use a secondary cache of previous work to allow us to build up the + # joined hosts for the given state group based on previous state groups. + # + # We cache one object per room containing the results of the last state + # group we got joined hosts for. The idea is that generally + # `get_joined_hosts` is called with the "current" state group for the + # room, and so consecutive calls will be for consecutive state groups + # which point to the previous state group. cache = await self._get_joined_hosts_cache(room_id) - return await cache.get_destinations(state_entry) + + # If the state group in the cache matches, we already have the data we need. + if state_entry.state_group == cache.state_group: + return frozenset(cache.hosts_to_joined_users) + + # Since we'll mutate the cache we need to lock. + with (await self._joined_host_linearizer.queue(room_id)): + if state_entry.state_group == cache.state_group: + # Same state group, so nothing to do. We've already checked for + # this above, but the cache may have changed while waiting on + # the lock. + pass + elif state_entry.prev_group == cache.state_group: + # The cached work is for the previous state group, so we work out + # the delta. 
+ for (typ, state_key), event_id in state_entry.delta_ids.items(): + if typ != EventTypes.Member: + continue + + host = intern_string(get_domain_from_id(state_key)) + user_id = state_key + known_joins = cache.hosts_to_joined_users.setdefault(host, set()) + + event = await self.get_event(event_id) + if event.membership == Membership.JOIN: + known_joins.add(user_id) + else: + known_joins.discard(user_id) + + if not known_joins: + cache.hosts_to_joined_users.pop(host, None) + else: + # The cache doesn't match the state group or prev state group, + # so we calculate the result from first principles. + joined_users = await self.get_joined_users_from_state( + room_id, state_entry + ) + + cache.hosts_to_joined_users = {} + for user_id in joined_users: + host = intern_string(get_domain_from_id(user_id)) + cache.hosts_to_joined_users.setdefault(host, set()).add(user_id) + + if state_entry.state_group: + cache.state_group = state_entry.state_group + else: + cache.state_group = object() + + return frozenset(cache.hosts_to_joined_users) @cached(max_entries=10000) def _get_joined_hosts_cache(self, room_id: str) -> "_JoinedHostsCache": - return _JoinedHostsCache(self, room_id) + return _JoinedHostsCache() @cached(num_args=2) async def did_forget(self, user_id: str, room_id: str) -> bool: @@ -1026,71 +1132,18 @@ class RoomMemberStore(RoomMemberWorkerStore, RoomMemberBackgroundUpdateStore): await self.db_pool.runInteraction("forget_membership", f) +@attr.s(slots=True) class _JoinedHostsCache: - """Cache for joined hosts in a room that is optimised to handle updates - via state deltas. - """ - - def __init__(self, store, room_id): - self.store = store - self.room_id = room_id - - self.hosts_to_joined_users = {} - - self.state_group = object() + """The cached data used by the `_get_joined_hosts_cache`.""" - self.linearizer = Linearizer("_JoinedHostsCache") + # Dict of host to the set of their users in the room at the state group. + hosts_to_joined_users = attr.ib(type=Dict[str, Set[str]], factory=dict) - self._len = 0 - - async def get_destinations(self, state_entry: "_StateCacheEntry") -> Set[str]: - """Get set of destinations for a state entry - - Args: - state_entry - - Returns: - The destinations as a set. - """ - if state_entry.state_group == self.state_group: - return frozenset(self.hosts_to_joined_users) - - with (await self.linearizer.queue(())): - if state_entry.state_group == self.state_group: - pass - elif state_entry.prev_group == self.state_group: - for (typ, state_key), event_id in state_entry.delta_ids.items(): - if typ != EventTypes.Member: - continue - - host = intern_string(get_domain_from_id(state_key)) - user_id = state_key - known_joins = self.hosts_to_joined_users.setdefault(host, set()) - - event = await self.store.get_event(event_id) - if event.membership == Membership.JOIN: - known_joins.add(user_id) - else: - known_joins.discard(user_id) - - if not known_joins: - self.hosts_to_joined_users.pop(host, None) - else: - joined_users = await self.store.get_joined_users_from_state( - self.room_id, state_entry - ) - - self.hosts_to_joined_users = {} - for user_id in joined_users: - host = intern_string(get_domain_from_id(user_id)) - self.hosts_to_joined_users.setdefault(host, set()).add(user_id) - - if state_entry.state_group: - self.state_group = state_entry.state_group - else: - self.state_group = object() - self._len = sum(len(v) for v in self.hosts_to_joined_users.values()) - return frozenset(self.hosts_to_joined_users) + # The state group `hosts_to_joined_users` is derived from. 
Will be an object + # if the instance is newly created or if the state is not based on a state + # group. (An object is used as a sentinel value to ensure that it never is + # equal to anything else). + state_group = attr.ib(type=Union[object, int], factory=object) def __len__(self): - return self._len + return sum(len(v) for v in self.hosts_to_joined_users.values()) diff --git a/synapse/storage/databases/main/schema/delta/50/make_event_content_nullable.py b/synapse/storage/databases/main/schema/delta/50/make_event_content_nullable.py index b1684a8441..acd6ad1e1f 100644 --- a/synapse/storage/databases/main/schema/delta/50/make_event_content_nullable.py +++ b/synapse/storage/databases/main/schema/delta/50/make_event_content_nullable.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2018 New Vector Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/storage/databases/main/schema/delta/57/local_current_membership.py b/synapse/storage/databases/main/schema/delta/57/local_current_membership.py index 44917f0a2e..66989222e6 100644 --- a/synapse/storage/databases/main/schema/delta/57/local_current_membership.py +++ b/synapse/storage/databases/main/schema/delta/57/local_current_membership.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2020 New Vector Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/storage/databases/main/schema/delta/59/12account_validity_token_used_ts_ms.sql b/synapse/storage/databases/main/schema/delta/59/12account_validity_token_used_ts_ms.sql new file mode 100644 index 0000000000..4836dac16e --- /dev/null +++ b/synapse/storage/databases/main/schema/delta/59/12account_validity_token_used_ts_ms.sql @@ -0,0 +1,18 @@ +/* Copyright 2020 The Matrix.org Foundation C.I.C. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +-- Track when users renew their account using the value of the 'renewal_token' column. +-- This field should be set to NULL after a fresh token is generated. +ALTER TABLE account_validity ADD token_used_ts_ms BIGINT; diff --git a/synapse/storage/databases/main/schema/delta/59/12presence_stream_instance.sql b/synapse/storage/databases/main/schema/delta/59/12presence_stream_instance.sql new file mode 100644 index 0000000000..b6ba0bda1a --- /dev/null +++ b/synapse/storage/databases/main/schema/delta/59/12presence_stream_instance.sql @@ -0,0 +1,18 @@ +/* Copyright 2021 The Matrix.org Foundation C.I.C + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +-- Add a column to specify which instance wrote the row. Historic rows have +-- `NULL`, which indicates that the master instance wrote them. +ALTER TABLE presence_stream ADD COLUMN instance_name TEXT; diff --git a/synapse/storage/databases/main/schema/delta/59/12presence_stream_instance_seq.sql.postgres b/synapse/storage/databases/main/schema/delta/59/12presence_stream_instance_seq.sql.postgres new file mode 100644 index 0000000000..02b182adf9 --- /dev/null +++ b/synapse/storage/databases/main/schema/delta/59/12presence_stream_instance_seq.sql.postgres @@ -0,0 +1,20 @@ +/* Copyright 2021 The Matrix.org Foundation C.I.C + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +CREATE SEQUENCE IF NOT EXISTS presence_stream_sequence; + +SELECT setval('presence_stream_sequence', ( + SELECT COALESCE(MAX(stream_id), 1) FROM presence_stream +)); diff --git a/synapse/storage/databases/main/search.py b/synapse/storage/databases/main/search.py index f5e7d9ef98..6480d5a9f5 100644 --- a/synapse/storage/databases/main/search.py +++ b/synapse/storage/databases/main/search.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2015, 2016 OpenMarket Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -16,7 +15,7 @@ import logging import re from collections import namedtuple -from typing import List, Optional, Set +from typing import Collection, List, Optional, Set from synapse.api.errors import SynapseError from synapse.events import EventBase @@ -24,7 +23,6 @@ from synapse.storage._base import SQLBaseStore, db_to_json, make_in_list_sql_cla from synapse.storage.database import DatabasePool from synapse.storage.databases.main.events_worker import EventRedactBehaviour from synapse.storage.engines import PostgresEngine, Sqlite3Engine -from synapse.types import Collection logger = logging.getLogger(__name__) diff --git a/synapse/storage/databases/main/signatures.py b/synapse/storage/databases/main/signatures.py index c8c67953e4..ab2159c2d3 100644 --- a/synapse/storage/databases/main/signatures.py +++ b/synapse/storage/databases/main/signatures.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2014-2016 OpenMarket Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/storage/databases/main/state.py b/synapse/storage/databases/main/state.py index 93431efe00..1757064a68 100644 --- a/synapse/storage/databases/main/state.py +++ b/synapse/storage/databases/main/state.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2014-2016 OpenMarket Ltd # Copyright 2020 The Matrix.org Foundation C.I.C. 
# diff --git a/synapse/storage/databases/main/state_deltas.py b/synapse/storage/databases/main/state_deltas.py index 0dbb501f16..bff7d0404f 100644 --- a/synapse/storage/databases/main/state_deltas.py +++ b/synapse/storage/databases/main/state_deltas.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2018 Vector Creations Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/storage/databases/main/stats.py b/synapse/storage/databases/main/stats.py index b33c93da2d..82a1833509 100644 --- a/synapse/storage/databases/main/stats.py +++ b/synapse/storage/databases/main/stats.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2018, 2019 New Vector Ltd # Copyright 2019 The Matrix.org Foundation C.I.C. # diff --git a/synapse/storage/databases/main/stream.py b/synapse/storage/databases/main/stream.py index 91f8abb67d..7581c7d3ff 100644 --- a/synapse/storage/databases/main/stream.py +++ b/synapse/storage/databases/main/stream.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2014-2016 OpenMarket Ltd # Copyright 2017 Vector Creations Ltd # Copyright 2018-2019 New Vector Ltd @@ -38,7 +37,7 @@ what sort order was used: import abc import logging from collections import namedtuple -from typing import TYPE_CHECKING, Dict, List, Optional, Set, Tuple +from typing import TYPE_CHECKING, Collection, Dict, List, Optional, Set, Tuple from twisted.internet import defer @@ -54,7 +53,7 @@ from synapse.storage.database import ( from synapse.storage.databases.main.events_worker import EventsWorkerStore from synapse.storage.engines import BaseDatabaseEngine, PostgresEngine from synapse.storage.util.id_generators import MultiWriterIdGenerator -from synapse.types import Collection, PersistedEventPosition, RoomStreamToken +from synapse.types import PersistedEventPosition, RoomStreamToken from synapse.util.caches.descriptors import cached from synapse.util.caches.stream_change_cache import StreamChangeCache diff --git a/synapse/storage/databases/main/tags.py b/synapse/storage/databases/main/tags.py index 50067eabfc..1d62c6140f 100644 --- a/synapse/storage/databases/main/tags.py +++ b/synapse/storage/databases/main/tags.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2014-2016 OpenMarket Ltd # Copyright 2018 New Vector Ltd # diff --git a/synapse/storage/databases/main/transactions.py b/synapse/storage/databases/main/transactions.py index b7072f1f5e..82335e7a9d 100644 --- a/synapse/storage/databases/main/transactions.py +++ b/synapse/storage/databases/main/transactions.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2014-2016 OpenMarket Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/storage/databases/main/ui_auth.py b/synapse/storage/databases/main/ui_auth.py index 5473ec1485..22c05cdde7 100644 --- a/synapse/storage/databases/main/ui_auth.py +++ b/synapse/storage/databases/main/ui_auth.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2020 Matrix.org Foundation C.I.C. 
# # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/storage/databases/main/user_directory.py b/synapse/storage/databases/main/user_directory.py index 1026f321e5..7a082fdd21 100644 --- a/synapse/storage/databases/main/user_directory.py +++ b/synapse/storage/databases/main/user_directory.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2017 Vector Creations Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/storage/databases/main/user_erasure_store.py b/synapse/storage/databases/main/user_erasure_store.py index f9575b1f1f..acf6b2fb64 100644 --- a/synapse/storage/databases/main/user_erasure_store.py +++ b/synapse/storage/databases/main/user_erasure_store.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2018 New Vector Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/storage/databases/state/__init__.py b/synapse/storage/databases/state/__init__.py index c90d022899..e5100d6108 100644 --- a/synapse/storage/databases/state/__init__.py +++ b/synapse/storage/databases/state/__init__.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2019 The Matrix.org Foundation C.I.C. # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/storage/databases/state/bg_updates.py b/synapse/storage/databases/state/bg_updates.py index 75c09b3687..c2891cb07f 100644 --- a/synapse/storage/databases/state/bg_updates.py +++ b/synapse/storage/databases/state/bg_updates.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2014-2016 OpenMarket Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/storage/databases/state/store.py b/synapse/storage/databases/state/store.py index dfcf89d91c..e38461adbc 100644 --- a/synapse/storage/databases/state/store.py +++ b/synapse/storage/databases/state/store.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2014-2016 OpenMarket Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/storage/engines/__init__.py b/synapse/storage/engines/__init__.py index d15ccfacde..9abc02046e 100644 --- a/synapse/storage/engines/__init__.py +++ b/synapse/storage/engines/__init__.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2015, 2016 OpenMarket Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/storage/engines/_base.py b/synapse/storage/engines/_base.py index 21db1645d3..1882bfd9cf 100644 --- a/synapse/storage/engines/_base.py +++ b/synapse/storage/engines/_base.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2015, 2016 OpenMarket Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/storage/engines/postgres.py b/synapse/storage/engines/postgres.py index dba8cc51d3..21411c5fea 100644 --- a/synapse/storage/engines/postgres.py +++ b/synapse/storage/engines/postgres.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2015, 2016 OpenMarket Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/storage/engines/sqlite.py b/synapse/storage/engines/sqlite.py index f4f16456f2..5fe1b205e1 100644 --- a/synapse/storage/engines/sqlite.py +++ b/synapse/storage/engines/sqlite.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2015, 2016 OpenMarket Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/storage/keys.py b/synapse/storage/keys.py index c03871f393..540adb8781 100644 --- 
a/synapse/storage/keys.py +++ b/synapse/storage/keys.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2014-2016 OpenMarket Ltd # Copyright 2019 New Vector Ltd. # diff --git a/synapse/storage/persist_events.py b/synapse/storage/persist_events.py index 3a0d6fb32e..33dc752d8f 100644 --- a/synapse/storage/persist_events.py +++ b/synapse/storage/persist_events.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2014-2016 OpenMarket Ltd # Copyright 2018-2019 New Vector Ltd # Copyright 2019 The Matrix.org Foundation C.I.C. @@ -18,7 +17,7 @@ import itertools import logging from collections import deque, namedtuple -from typing import Dict, Iterable, List, Optional, Set, Tuple +from typing import Collection, Dict, Iterable, List, Optional, Set, Tuple from prometheus_client import Counter, Histogram @@ -33,7 +32,6 @@ from synapse.storage.databases import Databases from synapse.storage.databases.main.events import DeltaState from synapse.storage.databases.main.events_worker import EventRedactBehaviour from synapse.types import ( - Collection, PersistedEventPosition, RoomStreamToken, StateMap, diff --git a/synapse/storage/prepare_database.py b/synapse/storage/prepare_database.py index c7f0b8ccb5..7a2cbee426 100644 --- a/synapse/storage/prepare_database.py +++ b/synapse/storage/prepare_database.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2014 - 2016 OpenMarket Ltd # Copyright 2018 New Vector Ltd # @@ -18,7 +17,7 @@ import logging import os import re from collections import Counter -from typing import Generator, Iterable, List, Optional, TextIO, Tuple +from typing import Collection, Generator, Iterable, List, Optional, TextIO, Tuple import attr from typing_extensions import Counter as CounterType @@ -28,7 +27,6 @@ from synapse.storage.database import LoggingDatabaseConnection from synapse.storage.engines import BaseDatabaseEngine from synapse.storage.engines.postgres import PostgresEngine from synapse.storage.types import Cursor -from synapse.types import Collection logger = logging.getLogger(__name__) diff --git a/synapse/storage/purge_events.py b/synapse/storage/purge_events.py index ad954990a7..30669beb7c 100644 --- a/synapse/storage/purge_events.py +++ b/synapse/storage/purge_events.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2019 The Matrix.org Foundation C.I.C. 
# # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/storage/push_rule.py b/synapse/storage/push_rule.py index f47cec0d86..2d5c21ef72 100644 --- a/synapse/storage/push_rule.py +++ b/synapse/storage/push_rule.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2014-2016 OpenMarket Ltd # Copyright 2018 New Vector Ltd # diff --git a/synapse/storage/relations.py b/synapse/storage/relations.py index 2564f34b47..c552dbf04c 100644 --- a/synapse/storage/relations.py +++ b/synapse/storage/relations.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2019 New Vector Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/storage/roommember.py b/synapse/storage/roommember.py index d2ff4da6b9..c34fbf21bc 100644 --- a/synapse/storage/roommember.py +++ b/synapse/storage/roommember.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2014-2016 OpenMarket Ltd # Copyright 2018 New Vector Ltd # diff --git a/synapse/storage/state.py b/synapse/storage/state.py index c1c147c62a..cfafba22c5 100644 --- a/synapse/storage/state.py +++ b/synapse/storage/state.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2014-2016 OpenMarket Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/storage/types.py b/synapse/storage/types.py index 17291c9d5e..57f4883bf4 100644 --- a/synapse/storage/types.py +++ b/synapse/storage/types.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2020 The Matrix.org Foundation C.I.C. # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/storage/util/__init__.py b/synapse/storage/util/__init__.py index bfebb0f644..5e83dba2ed 100644 --- a/synapse/storage/util/__init__.py +++ b/synapse/storage/util/__init__.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2014-2016 OpenMarket Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/storage/util/id_generators.py b/synapse/storage/util/id_generators.py index 32d6cc16b9..b1bd3a52d9 100644 --- a/synapse/storage/util/id_generators.py +++ b/synapse/storage/util/id_generators.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2014-2016 OpenMarket Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/storage/util/sequence.py b/synapse/storage/util/sequence.py index 36a67e7019..30b6b8e0ca 100644 --- a/synapse/storage/util/sequence.py +++ b/synapse/storage/util/sequence.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2020 The Matrix.org Foundation C.I.C. 
# # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/streams/__init__.py b/synapse/streams/__init__.py index bfebb0f644..5e83dba2ed 100644 --- a/synapse/streams/__init__.py +++ b/synapse/streams/__init__.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2014-2016 OpenMarket Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/streams/config.py b/synapse/streams/config.py index fdda21d165..13d300588b 100644 --- a/synapse/streams/config.py +++ b/synapse/streams/config.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2014-2016 OpenMarket Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/streams/events.py b/synapse/streams/events.py index 92fd5d489f..20fceaa935 100644 --- a/synapse/streams/events.py +++ b/synapse/streams/events.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2014-2016 OpenMarket Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/types.py b/synapse/types.py index 00655f6dd2..7b12231576 100644 --- a/synapse/types.py +++ b/synapse/types.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2014-2016 OpenMarket Ltd # Copyright 2019 The Matrix.org Foundation C.I.C. # @@ -16,13 +15,11 @@ import abc import re import string -import sys from collections import namedtuple from typing import ( TYPE_CHECKING, Any, Dict, - Iterable, Mapping, MutableMapping, Optional, @@ -52,18 +49,6 @@ if TYPE_CHECKING: from synapse.appservice.api import ApplicationService from synapse.storage.databases.main import DataStore -# define a version of typing.Collection that works on python 3.5 -if sys.version_info[:3] >= (3, 6, 0): - from typing import Collection -else: - from typing import Container, Sized - - T_co = TypeVar("T_co", covariant=True) - - class Collection(Iterable[T_co], Container[T_co], Sized): # type: ignore - __slots__ = () - - # Define a state map type from type/state_key to T (usually an event ID or # event) T = TypeVar("T") @@ -215,9 +200,8 @@ def get_localpart_from_id(string): DS = TypeVar("DS", bound="DomainSpecificString") -class DomainSpecificString( - namedtuple("DomainSpecificString", ("localpart", "domain")), metaclass=abc.ABCMeta -): +@attr.s(slots=True, frozen=True, repr=False) +class DomainSpecificString(metaclass=abc.ABCMeta): """Common base class among ID/name strings that have a local part and a domain name, prefixed with a sigil. @@ -229,11 +213,8 @@ class DomainSpecificString( SIGIL = abc.abstractproperty() # type: str # type: ignore - # Deny iteration because it will bite you if you try to create a singleton - # set by: - # users = set(user) - def __iter__(self): - raise ValueError("Attempted to iterate a %s" % (type(self).__name__,)) + localpart = attr.ib(type=str) + domain = attr.ib(type=str) # Because this class is a namedtuple of strings and booleans, it is deeply # immutable. @@ -288,30 +269,35 @@ class DomainSpecificString( __repr__ = to_string +@attr.s(slots=True, frozen=True, repr=False) class UserID(DomainSpecificString): """Structure representing a user ID.""" SIGIL = "@" +@attr.s(slots=True, frozen=True, repr=False) class RoomAlias(DomainSpecificString): """Structure representing a room name.""" SIGIL = "#" +@attr.s(slots=True, frozen=True, repr=False) class RoomID(DomainSpecificString): """Structure representing a room id. """ SIGIL = "!" +@attr.s(slots=True, frozen=True, repr=False) class EventID(DomainSpecificString): """Structure representing an event id. 
""" SIGIL = "$" +@attr.s(slots=True, frozen=True, repr=False) class GroupID(DomainSpecificString): """Structure representing a group ID.""" diff --git a/synapse/util/__init__.py b/synapse/util/__init__.py index 517686f0a6..0f84fa3f4e 100644 --- a/synapse/util/__init__.py +++ b/synapse/util/__init__.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2014-2016 OpenMarket Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/util/async_helpers.py b/synapse/util/async_helpers.py index c3b2d981ea..5c55bb0125 100644 --- a/synapse/util/async_helpers.py +++ b/synapse/util/async_helpers.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2014-2016 OpenMarket Ltd # Copyright 2018 New Vector Ltd # diff --git a/synapse/util/caches/__init__.py b/synapse/util/caches/__init__.py index 48f64eeb38..46af7fa473 100644 --- a/synapse/util/caches/__init__.py +++ b/synapse/util/caches/__init__.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2015, 2016 OpenMarket Ltd # Copyright 2019, 2020 The Matrix.org Foundation C.I.C. # diff --git a/synapse/util/caches/cached_call.py b/synapse/util/caches/cached_call.py index 3ee0f2317a..a301c9e89b 100644 --- a/synapse/util/caches/cached_call.py +++ b/synapse/util/caches/cached_call.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2021 The Matrix.org Foundation C.I.C. # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/util/caches/deferred_cache.py b/synapse/util/caches/deferred_cache.py index dd392cf694..484097a48a 100644 --- a/synapse/util/caches/deferred_cache.py +++ b/synapse/util/caches/deferred_cache.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2015, 2016 OpenMarket Ltd # Copyright 2018 New Vector Ltd # Copyright 2020 The Matrix.org Foundation C.I.C. 
diff --git a/synapse/util/caches/descriptors.py b/synapse/util/caches/descriptors.py index 4e84379914..ac4a078b26 100644 --- a/synapse/util/caches/descriptors.py +++ b/synapse/util/caches/descriptors.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2015, 2016 OpenMarket Ltd # Copyright 2018 New Vector Ltd # diff --git a/synapse/util/caches/dictionary_cache.py b/synapse/util/caches/dictionary_cache.py index b3b413b02c..56d94d96ce 100644 --- a/synapse/util/caches/dictionary_cache.py +++ b/synapse/util/caches/dictionary_cache.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2015, 2016 OpenMarket Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/util/caches/expiringcache.py b/synapse/util/caches/expiringcache.py index 4dc3477e89..ac47a31cd7 100644 --- a/synapse/util/caches/expiringcache.py +++ b/synapse/util/caches/expiringcache.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2015, 2016 OpenMarket Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/util/caches/lrucache.py b/synapse/util/caches/lrucache.py index 20c8e2d9f5..a21d34fcb4 100644 --- a/synapse/util/caches/lrucache.py +++ b/synapse/util/caches/lrucache.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2015, 2016 OpenMarket Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/util/caches/response_cache.py b/synapse/util/caches/response_cache.py index 46ea8e0964..25ea1bcc91 100644 --- a/synapse/util/caches/response_cache.py +++ b/synapse/util/caches/response_cache.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2016 OpenMarket Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -111,7 +110,7 @@ class ResponseCache(Generic[T]): return result.observe() def wrap( - self, key: T, callback: "Callable[..., Any]", *args: Any, **kwargs: Any + self, key: T, callback: Callable[..., Any], *args: Any, **kwargs: Any ) -> defer.Deferred: """Wrap together a *get* and *set* call, taking care of logcontexts diff --git a/synapse/util/caches/stream_change_cache.py b/synapse/util/caches/stream_change_cache.py index 644e9e778a..e81e468899 100644 --- a/synapse/util/caches/stream_change_cache.py +++ b/synapse/util/caches/stream_change_cache.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2016 OpenMarket Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -15,11 +14,10 @@ import logging import math -from typing import Dict, FrozenSet, List, Mapping, Optional, Set, Union +from typing import Collection, Dict, FrozenSet, List, Mapping, Optional, Set, Union from sortedcontainers import SortedDict -from synapse.types import Collection from synapse.util import caches logger = logging.getLogger(__name__) diff --git a/synapse/util/caches/ttlcache.py b/synapse/util/caches/ttlcache.py index 96a8274940..c276107d56 100644 --- a/synapse/util/caches/ttlcache.py +++ b/synapse/util/caches/ttlcache.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2015, 2016 OpenMarket Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/util/daemonize.py b/synapse/util/daemonize.py index 23393cf49b..31b24dd188 100644 --- a/synapse/util/daemonize.py +++ b/synapse/util/daemonize.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright (c) 2012, 2013, 2014 Ilya Otyutskiy <ilya.otyutskiy@icloud.com> # Copyright 2020 The Matrix.org Foundation C.I.C. 
# diff --git a/synapse/util/distributor.py b/synapse/util/distributor.py index 3c47285d05..1f803aef6d 100644 --- a/synapse/util/distributor.py +++ b/synapse/util/distributor.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2014-2016 OpenMarket Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/util/file_consumer.py b/synapse/util/file_consumer.py index 68dc632491..e946189f9a 100644 --- a/synapse/util/file_consumer.py +++ b/synapse/util/file_consumer.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2018 New Vector Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/util/frozenutils.py b/synapse/util/frozenutils.py index 5ca2e71e60..2ac7c2913c 100644 --- a/synapse/util/frozenutils.py +++ b/synapse/util/frozenutils.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2014-2016 OpenMarket Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/util/hash.py b/synapse/util/hash.py index 359168704e..ba676e1762 100644 --- a/synapse/util/hash.py +++ b/synapse/util/hash.py @@ -1,5 +1,3 @@ -# -*- coding: utf-8 -*- - # Copyright 2019 The Matrix.org Foundation C.I.C. # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/util/iterutils.py b/synapse/util/iterutils.py index 98707c119d..abfdc29832 100644 --- a/synapse/util/iterutils.py +++ b/synapse/util/iterutils.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2014-2016 OpenMarket Ltd # Copyright 2020 The Matrix.org Foundation C.I.C. # @@ -16,6 +15,7 @@ import heapq from itertools import islice from typing import ( + Collection, Dict, Generator, Iterable, @@ -27,8 +27,6 @@ from typing import ( TypeVar, ) -from synapse.types import Collection - T = TypeVar("T") diff --git a/synapse/util/jsonobject.py b/synapse/util/jsonobject.py index e3a8ed5b2f..abc12f0837 100644 --- a/synapse/util/jsonobject.py +++ b/synapse/util/jsonobject.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2014-2016 OpenMarket Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/util/macaroons.py b/synapse/util/macaroons.py index 12cdd53327..f6ebfd7e7d 100644 --- a/synapse/util/macaroons.py +++ b/synapse/util/macaroons.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2020 Quentin Gliech # Copyright 2021 The Matrix.org Foundation C.I.C. 
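Several of the hunks above (`persist_events.py`, `prepare_database.py`, `stream_change_cache.py`, `iterutils.py`) switch from the home-grown `synapse.types.Collection` alias to the standard-library one, now that the Python 3.5 shim is gone. A small sketch of the new-style usage; `count_local_users` is a hypothetical helper used only to illustrate the annotation:

```python
# Collection now comes straight from typing.
from typing import Collection


def count_local_users(user_ids: Collection[str], server_name: str) -> int:
    # typing.Collection = Sized + Iterable + Container, which is all these
    # call sites ever relied on from the old synapse.types.Collection shim.
    return sum(1 for u in user_ids if u.endswith(":" + server_name))


assert count_local_users({"@a:test", "@b:other"}, "test") == 1
```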
# diff --git a/synapse/util/metrics.py b/synapse/util/metrics.py index 019cfa17cc..6d14351bd2 100644 --- a/synapse/util/metrics.py +++ b/synapse/util/metrics.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2016 OpenMarket Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/util/module_loader.py b/synapse/util/module_loader.py index d184e2a90c..8acbe276e4 100644 --- a/synapse/util/module_loader.py +++ b/synapse/util/module_loader.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2017 New Vector Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/util/msisdn.py b/synapse/util/msisdn.py index c8bcbe297a..bbbdebf264 100644 --- a/synapse/util/msisdn.py +++ b/synapse/util/msisdn.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2017 Vector Creations Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/util/patch_inline_callbacks.py b/synapse/util/patch_inline_callbacks.py index d9f9ae99d6..eed0291cae 100644 --- a/synapse/util/patch_inline_callbacks.py +++ b/synapse/util/patch_inline_callbacks.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2018 New Vector Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/util/ratelimitutils.py b/synapse/util/ratelimitutils.py index 70d11e1ec3..a654c69684 100644 --- a/synapse/util/ratelimitutils.py +++ b/synapse/util/ratelimitutils.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2015, 2016 OpenMarket Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/util/retryutils.py b/synapse/util/retryutils.py index 4ab379e429..f9c370a814 100644 --- a/synapse/util/retryutils.py +++ b/synapse/util/retryutils.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2015, 2016 OpenMarket Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/util/rlimit.py b/synapse/util/rlimit.py index 207cd17c2a..bf812ab516 100644 --- a/synapse/util/rlimit.py +++ b/synapse/util/rlimit.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2016 OpenMarket Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/util/stringutils.py b/synapse/util/stringutils.py index 9ce7873ab5..cd82777f80 100644 --- a/synapse/util/stringutils.py +++ b/synapse/util/stringutils.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2014-2016 OpenMarket Ltd # Copyright 2020 The Matrix.org Foundation C.I.C. # @@ -133,6 +132,38 @@ def parse_and_validate_server_name(server_name: str) -> Tuple[str, Optional[int] return host, port +def valid_id_server_location(id_server: str) -> bool: + """Check whether an identity server location, such as the one passed as the + `id_server` parameter to `/_matrix/client/r0/account/3pid/bind`, is valid. + + A valid identity server location consists of a valid hostname and optional + port number, optionally followed by any number of `/` delimited path + components, without any fragment or query string parts. + + Args: + id_server: identity server location string to validate + + Returns: + True if valid, False otherwise. + """ + + components = id_server.split("/", 1) + + host = components[0] + + try: + parse_and_validate_server_name(host) + except ValueError: + return False + + if len(components) < 2: + # no path + return True + + path = components[1] + return "#" not in path and "?" 
not in path + + def parse_and_validate_mxc_uri(mxc: str) -> Tuple[str, Optional[int], str]: """Parse the given string as an MXC URI diff --git a/synapse/util/templates.py b/synapse/util/templates.py index 392dae4a40..38543dd1ea 100644 --- a/synapse/util/templates.py +++ b/synapse/util/templates.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2021 The Matrix.org Foundation C.I.C. # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/util/threepids.py b/synapse/util/threepids.py index 63f955acff..eb005007d5 100644 --- a/synapse/util/threepids.py +++ b/synapse/util/threepids.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2018 New Vector Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -19,8 +18,18 @@ import re logger = logging.getLogger(__name__) +# it's unclear what the maximum length of an email address is. RFC3696 (as corrected +# by errata) says: +# the upper limit on address lengths should normally be considered to be 254. +# +# In practice, mail servers appear to be more tolerant and allow 400 characters +# or so. Let's allow 500, which should be plenty for everyone. +# +MAX_EMAIL_ADDRESS_LENGTH = 500 + + async def check_3pid_allowed(hs, medium, address): - """Checks whether a given 3PID is allowed to be used on this HS + """Checks whether a given format of 3PID is allowed to be used on this HS Args: hs (synapse.server.HomeServer): server @@ -98,3 +107,23 @@ def canonicalise_email(address: str) -> str: raise ValueError("Unable to parse email address") return parts[0].casefold() + "@" + parts[1].lower() + + +def validate_email(address: str) -> str: + """Does some basic validation on an email address. + + Returns the canonicalised email, as returned by `canonicalise_email`. + + Raises a ValueError if the email is invalid. + """ + # First we try canonicalising in case that fails + address = canonicalise_email(address) + + # Email addresses have to be at least 3 characters. 
+ if len(address) < 3: + raise ValueError("Unable to parse email address") + + if len(address) > MAX_EMAIL_ADDRESS_LENGTH: + raise ValueError("Unable to parse email address") + + return address diff --git a/synapse/util/versionstring.py b/synapse/util/versionstring.py index ab7d03af3a..dfa30a6229 100644 --- a/synapse/util/versionstring.py +++ b/synapse/util/versionstring.py @@ -1,5 +1,4 @@ #!/usr/bin/env python -# -*- coding: utf-8 -*- # Copyright 2016 OpenMarket Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/util/wheel_timer.py b/synapse/util/wheel_timer.py index be3b22469d..61814aff24 100644 --- a/synapse/util/wheel_timer.py +++ b/synapse/util/wheel_timer.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2016 OpenMarket Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synapse/visibility.py b/synapse/visibility.py index ff53a49b3a..490fb26e81 100644 --- a/synapse/visibility.py +++ b/synapse/visibility.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2014 - 2016 OpenMarket Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synctl b/synctl index 56c0e3940f..ccf404accb 100755 --- a/synctl +++ b/synctl @@ -1,5 +1,4 @@ #!/usr/bin/env python -# -*- coding: utf-8 -*- # Copyright 2014-2016 OpenMarket Ltd # Copyright 2018 New Vector Ltd # diff --git a/synmark/__init__.py b/synmark/__init__.py index 3d4ec3e184..2cc00b0f03 100644 --- a/synmark/__init__.py +++ b/synmark/__init__.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2019 The Matrix.org Foundation C.I.C. # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synmark/__main__.py b/synmark/__main__.py index f55968a5a4..35a59e347a 100644 --- a/synmark/__main__.py +++ b/synmark/__main__.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2019 The Matrix.org Foundation C.I.C. # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synmark/suites/logging.py b/synmark/suites/logging.py index b3abc6b254..9419892e95 100644 --- a/synmark/suites/logging.py +++ b/synmark/suites/logging.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2019 The Matrix.org Foundation C.I.C. # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synmark/suites/lrucache.py b/synmark/suites/lrucache.py index 69ab042ccc..9b4a424149 100644 --- a/synmark/suites/lrucache.py +++ b/synmark/suites/lrucache.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2019 The Matrix.org Foundation C.I.C. # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/synmark/suites/lrucache_evict.py b/synmark/suites/lrucache_evict.py index 532b1cc702..0ee202ed36 100644 --- a/synmark/suites/lrucache_evict.py +++ b/synmark/suites/lrucache_evict.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2019 The Matrix.org Foundation C.I.C. 
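The two helpers added above, `valid_id_server_location` (stringutils) and `validate_email` (threepids), back the identity-server sanity checks and the email-length limit noted in the changelog. An illustrative usage sketch, assuming Synapse 1.33+ is importable; the hostnames and addresses are invented for the example:

```python
# Usage sketch only; hostnames and addresses are made up.
from synapse.util.stringutils import valid_id_server_location
from synapse.util.threepids import validate_email

# A valid identity server location is host[:port] plus optional /-delimited
# path components, with no query string or fragment.
assert valid_id_server_location("id.example.com")
assert valid_id_server_location("id.example.com:8090/v2")
assert not valid_id_server_location("id.example.com/path?next=elsewhere")
assert not valid_id_server_location("id.example.com/path#frag")

# validate_email canonicalises (casefolded localpart, lower-cased domain)
# and rejects anything over MAX_EMAIL_ADDRESS_LENGTH (500) characters.
assert validate_email("Alice@Example.COM") == "alice@example.com"
try:
    validate_email("a" * 600 + "@example.com")
except ValueError:
    pass  # rejected: longer than MAX_EMAIL_ADDRESS_LENGTH
```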
# # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/__init__.py b/tests/__init__.py index ed805db1c2..5fced5cc4c 100644 --- a/tests/__init__.py +++ b/tests/__init__.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2014-2016 OpenMarket Ltd # Copyright 2018 New Vector Ltd # diff --git a/tests/api/test_auth.py b/tests/api/test_auth.py index 28d77f0ca2..c0ed64f784 100644 --- a/tests/api/test_auth.py +++ b/tests/api/test_auth.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2015 - 2016 OpenMarket Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/api/test_filtering.py b/tests/api/test_filtering.py index ab7d290724..f44c91a373 100644 --- a/tests/api/test_filtering.py +++ b/tests/api/test_filtering.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2015, 2016 OpenMarket Ltd # Copyright 2017 Vector Creations Ltd # Copyright 2018-2019 New Vector Ltd diff --git a/tests/app/test_frontend_proxy.py b/tests/app/test_frontend_proxy.py deleted file mode 100644 index e0ca288829..0000000000 --- a/tests/app/test_frontend_proxy.py +++ /dev/null @@ -1,84 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2018 New Vector Ltd -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from synapse.app.generic_worker import GenericWorkerServer - -from tests.server import make_request -from tests.unittest import HomeserverTestCase - - -class FrontendProxyTests(HomeserverTestCase): - def make_homeserver(self, reactor, clock): - - hs = self.setup_test_homeserver( - federation_http_client=None, homeserver_to_use=GenericWorkerServer - ) - - return hs - - def default_config(self): - c = super().default_config() - c["worker_app"] = "synapse.app.frontend_proxy" - - c["worker_listeners"] = [ - { - "type": "http", - "port": 8080, - "bind_addresses": ["0.0.0.0"], - "resources": [{"names": ["client"]}], - } - ] - - return c - - def test_listen_http_with_presence_enabled(self): - """ - When presence is on, the stub servlet will not register. - """ - # Presence is on - self.hs.config.use_presence = True - - # Listen with the config - self.hs._listen_http(self.hs.config.worker.worker_listeners[0]) - - # Grab the resource from the site that was told to listen - self.assertEqual(len(self.reactor.tcpServers), 1) - site = self.reactor.tcpServers[0][1] - - channel = make_request(self.reactor, site, "PUT", "presence/a/status") - - # 400 + unrecognised, because nothing is registered - self.assertEqual(channel.code, 400) - self.assertEqual(channel.json_body["errcode"], "M_UNRECOGNIZED") - - def test_listen_http_with_presence_disabled(self): - """ - When presence is off, the stub servlet will register. 
- """ - # Presence is off - self.hs.config.use_presence = False - - # Listen with the config - self.hs._listen_http(self.hs.config.worker.worker_listeners[0]) - - # Grab the resource from the site that was told to listen - self.assertEqual(len(self.reactor.tcpServers), 1) - site = self.reactor.tcpServers[0][1] - - channel = make_request(self.reactor, site, "PUT", "presence/a/status") - - # 401, because the stub servlet still checks authentication - self.assertEqual(channel.code, 401) - self.assertEqual(channel.json_body["errcode"], "M_MISSING_TOKEN") diff --git a/tests/app/test_openid_listener.py b/tests/app/test_openid_listener.py index 33a37fe35e..264e101082 100644 --- a/tests/app/test_openid_listener.py +++ b/tests/app/test_openid_listener.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2019 New Vector Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -110,7 +109,7 @@ class SynapseHomeserverOpenIDListenerTests(HomeserverTestCase): } # Listen with the config - self.hs._listener_http(self.hs.get_config(), parse_listener_def(config)) + self.hs._listener_http(self.hs.config, parse_listener_def(config)) # Grab the resource from the site that was told to listen site = self.reactor.tcpServers[0][1] diff --git a/tests/appservice/__init__.py b/tests/appservice/__init__.py index fe0ac3f8e9..629e2df74a 100644 --- a/tests/appservice/__init__.py +++ b/tests/appservice/__init__.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2015, 2016 OpenMarket Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/appservice/test_appservice.py b/tests/appservice/test_appservice.py index 03a7440eec..f386b5e128 100644 --- a/tests/appservice/test_appservice.py +++ b/tests/appservice/test_appservice.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2015, 2016 OpenMarket Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/appservice/test_scheduler.py b/tests/appservice/test_scheduler.py index 3c27d797fb..a2b5ed2030 100644 --- a/tests/appservice/test_scheduler.py +++ b/tests/appservice/test_scheduler.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2015, 2016 OpenMarket Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/config/__init__.py b/tests/config/__init__.py index b7df13c9ee..f43a360a80 100644 --- a/tests/config/__init__.py +++ b/tests/config/__init__.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2016 OpenMarket Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/config/test_base.py b/tests/config/test_base.py index 42ee5f56d9..84ae3b88ae 100644 --- a/tests/config/test_base.py +++ b/tests/config/test_base.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2020 The Matrix.org Foundation C.I.C. # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/config/test_cache.py b/tests/config/test_cache.py index 2b7f09c14b..857d9cd096 100644 --- a/tests/config/test_cache.py +++ b/tests/config/test_cache.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2020 Matrix.org Foundation C.I.C. 
# # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/config/test_database.py b/tests/config/test_database.py index f675bde68e..9eca10bbe9 100644 --- a/tests/config/test_database.py +++ b/tests/config/test_database.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2019 New Vector Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/config/test_generate.py b/tests/config/test_generate.py index 463855ecc8..fdfbb0e38e 100644 --- a/tests/config/test_generate.py +++ b/tests/config/test_generate.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2016 OpenMarket Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/config/test_load.py b/tests/config/test_load.py index c109425671..ebe2c05165 100644 --- a/tests/config/test_load.py +++ b/tests/config/test_load.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2016 OpenMarket Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/config/test_ratelimiting.py b/tests/config/test_ratelimiting.py index 13ab282384..3c7bb32e07 100644 --- a/tests/config/test_ratelimiting.py +++ b/tests/config/test_ratelimiting.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2019 The Matrix.org Foundation C.I.C. # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/config/test_room_directory.py b/tests/config/test_room_directory.py index 0ec10019b3..db745815ef 100644 --- a/tests/config/test_room_directory.py +++ b/tests/config/test_room_directory.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2018 New Vector Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/config/test_server.py b/tests/config/test_server.py index 98af7aa675..6f2b9e997d 100644 --- a/tests/config/test_server.py +++ b/tests/config/test_server.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2019 New Vector Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/config/test_tls.py b/tests/config/test_tls.py index ec32d4b1ca..183034f7d4 100644 --- a/tests/config/test_tls.py +++ b/tests/config/test_tls.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2019 New Vector Ltd # Copyright 2019 Matrix.org Foundation C.I.C. # diff --git a/tests/config/test_util.py b/tests/config/test_util.py index 10363e3765..3d4929daac 100644 --- a/tests/config/test_util.py +++ b/tests/config/test_util.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2021 The Matrix.org Foundation C.I.C. 
# # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/crypto/__init__.py b/tests/crypto/__init__.py index bfebb0f644..5e83dba2ed 100644 --- a/tests/crypto/__init__.py +++ b/tests/crypto/__init__.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2014-2016 OpenMarket Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/crypto/test_event_signing.py b/tests/crypto/test_event_signing.py index 62f639a18d..1c920157f5 100644 --- a/tests/crypto/test_event_signing.py +++ b/tests/crypto/test_event_signing.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2015, 2016 OpenMarket Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/crypto/test_keyring.py b/tests/crypto/test_keyring.py index a56063315b..2775dfd880 100644 --- a/tests/crypto/test_keyring.py +++ b/tests/crypto/test_keyring.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2017 New Vector Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/events/test_presence_router.py b/tests/events/test_presence_router.py index c996ecc221..01d257307c 100644 --- a/tests/events/test_presence_router.py +++ b/tests/events/test_presence_router.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2021 The Matrix.org Foundation C.I.C. # # Licensed under the Apache License, Version 2.0 (the 'License'); diff --git a/tests/events/test_snapshot.py b/tests/events/test_snapshot.py index ec85324c0c..48e98aac79 100644 --- a/tests/events/test_snapshot.py +++ b/tests/events/test_snapshot.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2020 The Matrix.org Foundation C.I.C. # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/events/test_utils.py b/tests/events/test_utils.py index 8ba36c6074..9274ce4c39 100644 --- a/tests/events/test_utils.py +++ b/tests/events/test_utils.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2015, 2016 OpenMarket Ltd # # Licensed under the Apache License, Version 2.0 (the 'License'); diff --git a/tests/federation/test_complexity.py b/tests/federation/test_complexity.py index 701fa8379f..1a809b2a6a 100644 --- a/tests/federation/test_complexity.py +++ b/tests/federation/test_complexity.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2019 Matrix.org Foundation # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/federation/test_federation_sender.py b/tests/federation/test_federation_sender.py index deb12433cf..b00dd143d6 100644 --- a/tests/federation/test_federation_sender.py +++ b/tests/federation/test_federation_sender.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2019 New Vector Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/federation/test_federation_server.py b/tests/federation/test_federation_server.py index cfeccc0577..8508b6bd3b 100644 --- a/tests/federation/test_federation_server.py +++ b/tests/federation/test_federation_server.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2018 New Vector Ltd # Copyright 2019 Matrix.org Federation C.I.C # diff --git a/tests/federation/transport/test_server.py b/tests/federation/transport/test_server.py index 85500e169c..84fa72b9ff 100644 --- a/tests/federation/transport/test_server.py +++ b/tests/federation/transport/test_server.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2020 The Matrix.org Foundation C.I.C. 
# # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/handlers/test_admin.py b/tests/handlers/test_admin.py index 32669ae9ce..18a734daf4 100644 --- a/tests/handlers/test_admin.py +++ b/tests/handlers/test_admin.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2019 The Matrix.org Foundation C.I.C. # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/handlers/test_appservice.py b/tests/handlers/test_appservice.py index 6e325b24ce..b037b12a0f 100644 --- a/tests/handlers/test_appservice.py +++ b/tests/handlers/test_appservice.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2015, 2016 OpenMarket Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/handlers/test_auth.py b/tests/handlers/test_auth.py index 321c5ba045..fe7e9484fd 100644 --- a/tests/handlers/test_auth.py +++ b/tests/handlers/test_auth.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2015, 2016 OpenMarket Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/handlers/test_cas.py b/tests/handlers/test_cas.py index 0444b26798..b625995d12 100644 --- a/tests/handlers/test_cas.py +++ b/tests/handlers/test_cas.py @@ -13,7 +13,7 @@ # limitations under the License. from unittest.mock import Mock -from synapse.handlers.cas_handler import CasResponse +from synapse.handlers.cas import CasResponse from tests.test_utils import simple_async_mock from tests.unittest import HomeserverTestCase, override_config diff --git a/tests/handlers/test_device.py b/tests/handlers/test_device.py index 821629bc38..84c38b295d 100644 --- a/tests/handlers/test_device.py +++ b/tests/handlers/test_device.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2016 OpenMarket Ltd # Copyright 2018 New Vector Ltd # Copyright 2020 The Matrix.org Foundation C.I.C. diff --git a/tests/handlers/test_directory.py b/tests/handlers/test_directory.py index a8d0cf6603..70ebaf2c78 100644 --- a/tests/handlers/test_directory.py +++ b/tests/handlers/test_directory.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2014-2016 OpenMarket Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/handlers/test_e2e_keys.py b/tests/handlers/test_e2e_keys.py index 6915ac0205..61a00130b8 100644 --- a/tests/handlers/test_e2e_keys.py +++ b/tests/handlers/test_e2e_keys.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2016 OpenMarket Ltd # Copyright 2019 New Vector Ltd # Copyright 2019 The Matrix.org Foundation C.I.C. diff --git a/tests/handlers/test_e2e_room_keys.py b/tests/handlers/test_e2e_room_keys.py index 07893302ec..9b7e7a8e9a 100644 --- a/tests/handlers/test_e2e_room_keys.py +++ b/tests/handlers/test_e2e_room_keys.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2016 OpenMarket Ltd # Copyright 2017 New Vector Ltd # Copyright 2019 Matrix.org Foundation C.I.C. diff --git a/tests/handlers/test_federation.py b/tests/handlers/test_federation.py index 3af361195b..8796af45ed 100644 --- a/tests/handlers/test_federation.py +++ b/tests/handlers/test_federation.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2019 The Matrix.org Foundation C.I.C. 
# # Licensed under the Apache License, Version 2.0 (the "License"); @@ -223,7 +222,7 @@ class FederationTestCase(unittest.HomeserverTestCase): room_version, ) - for i in range(3): + for _ in range(3): event = create_invite() self.get_success( self.handler.on_invite_request( diff --git a/tests/handlers/test_message.py b/tests/handlers/test_message.py index a0d1ebdbe3..a8a9fc5b62 100644 --- a/tests/handlers/test_message.py +++ b/tests/handlers/test_message.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2020 The Matrix.org Foundation C.I.C. # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/handlers/test_oidc.py b/tests/handlers/test_oidc.py index 8702ee70e0..a25c89bd5b 100644 --- a/tests/handlers/test_oidc.py +++ b/tests/handlers/test_oidc.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2020 Quentin Gliech # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -500,7 +499,7 @@ class OidcHandlerTestCase(HomeserverTestCase): self.assertRenderedError("fetch_error") # Handle code exchange failure - from synapse.handlers.oidc_handler import OidcError + from synapse.handlers.oidc import OidcError self.provider._exchange_code = simple_async_mock( raises=OidcError("invalid_request") @@ -584,7 +583,7 @@ class OidcHandlerTestCase(HomeserverTestCase): body=b'{"error": "foo", "error_description": "bar"}', ) ) - from synapse.handlers.oidc_handler import OidcError + from synapse.handlers.oidc import OidcError exc = self.get_failure(self.provider._exchange_code(code), OidcError) self.assertEqual(exc.value.error, "foo") @@ -1127,7 +1126,7 @@ class OidcHandlerTestCase(HomeserverTestCase): client_redirect_url: str, ui_auth_session_id: str = "", ) -> str: - from synapse.handlers.oidc_handler import OidcSessionData + from synapse.handlers.oidc import OidcSessionData return self.handler._token_generator.generate_oidc_session_token( state=state, @@ -1153,7 +1152,7 @@ async def _make_callback_with_userinfo( userinfo: the OIDC userinfo dict client_redirect_url: the URL to redirect to on success. """ - from synapse.handlers.oidc_handler import OidcSessionData + from synapse.handlers.oidc import OidcSessionData handler = hs.get_oidc_handler() provider = handler._providers["oidc"] diff --git a/tests/handlers/test_password_providers.py b/tests/handlers/test_password_providers.py index e28e4159eb..32651db096 100644 --- a/tests/handlers/test_password_providers.py +++ b/tests/handlers/test_password_providers.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2020 The Matrix.org Foundation C.I.C. 
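The test updates above track the handler-module renames: `synapse.handlers.cas_handler` and `synapse.handlers.oidc_handler` are now `synapse.handlers.cas` and `synapse.handlers.oidc`. For downstream code the change is purely an import-path move, for example:

```python
# New-style imports (the old *_handler module paths no longer exist).
from synapse.handlers.cas import CasResponse
from synapse.handlers.oidc import OidcError, OidcSessionData
```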
# # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/handlers/test_presence.py b/tests/handlers/test_presence.py index 9f16cc65fc..ce330e79cc 100644 --- a/tests/handlers/test_presence.py +++ b/tests/handlers/test_presence.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2016 OpenMarket Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -22,6 +21,7 @@ from synapse.api.constants import EventTypes, Membership, PresenceState from synapse.api.presence import UserPresenceState from synapse.api.room_versions import KNOWN_ROOM_VERSIONS from synapse.events.builder import EventBuilder +from synapse.federation.sender import FederationSender from synapse.handlers.presence import ( EXTERNAL_PROCESS_EXPIRY, FEDERATION_PING_INTERVAL, @@ -472,6 +472,190 @@ class PresenceHandlerTestCase(unittest.HomeserverTestCase): self.assertEqual(state.state, PresenceState.OFFLINE) +class PresenceFederationQueueTestCase(unittest.HomeserverTestCase): + def prepare(self, reactor, clock, hs): + self.presence_handler = hs.get_presence_handler() + self.clock = hs.get_clock() + self.instance_name = hs.get_instance_name() + + self.queue = self.presence_handler.get_federation_queue() + + def test_send_and_get(self): + state1 = UserPresenceState.default("@user1:test") + state2 = UserPresenceState.default("@user2:test") + state3 = UserPresenceState.default("@user3:test") + + prev_token = self.queue.get_current_token(self.instance_name) + + self.queue.send_presence_to_destinations((state1, state2), ("dest1", "dest2")) + self.queue.send_presence_to_destinations((state3,), ("dest3",)) + + now_token = self.queue.get_current_token(self.instance_name) + + rows, upto_token, limited = self.get_success( + self.queue.get_replication_rows("master", prev_token, now_token, 10) + ) + + self.assertEqual(upto_token, now_token) + self.assertFalse(limited) + + expected_rows = [ + (1, ("dest1", "@user1:test")), + (1, ("dest2", "@user1:test")), + (1, ("dest1", "@user2:test")), + (1, ("dest2", "@user2:test")), + (2, ("dest3", "@user3:test")), + ] + + self.assertCountEqual(rows, expected_rows) + + now_token = self.queue.get_current_token(self.instance_name) + rows, upto_token, limited = self.get_success( + self.queue.get_replication_rows("master", upto_token, now_token, 10) + ) + self.assertEqual(upto_token, now_token) + self.assertFalse(limited) + self.assertCountEqual(rows, []) + + def test_send_and_get_split(self): + state1 = UserPresenceState.default("@user1:test") + state2 = UserPresenceState.default("@user2:test") + state3 = UserPresenceState.default("@user3:test") + + prev_token = self.queue.get_current_token(self.instance_name) + + self.queue.send_presence_to_destinations((state1, state2), ("dest1", "dest2")) + + now_token = self.queue.get_current_token(self.instance_name) + + self.queue.send_presence_to_destinations((state3,), ("dest3",)) + + rows, upto_token, limited = self.get_success( + self.queue.get_replication_rows("master", prev_token, now_token, 10) + ) + + self.assertEqual(upto_token, now_token) + self.assertFalse(limited) + + expected_rows = [ + (1, ("dest1", "@user1:test")), + (1, ("dest2", "@user1:test")), + (1, ("dest1", "@user2:test")), + (1, ("dest2", "@user2:test")), + ] + + self.assertCountEqual(rows, expected_rows) + + now_token = self.queue.get_current_token(self.instance_name) + rows, upto_token, limited = self.get_success( + self.queue.get_replication_rows("master", upto_token, now_token, 10) + ) + + self.assertEqual(upto_token, now_token) + 
self.assertFalse(limited) + + expected_rows = [ + (2, ("dest3", "@user3:test")), + ] + + self.assertCountEqual(rows, expected_rows) + + def test_clear_queue_all(self): + state1 = UserPresenceState.default("@user1:test") + state2 = UserPresenceState.default("@user2:test") + state3 = UserPresenceState.default("@user3:test") + + prev_token = self.queue.get_current_token(self.instance_name) + + self.queue.send_presence_to_destinations((state1, state2), ("dest1", "dest2")) + self.queue.send_presence_to_destinations((state3,), ("dest3",)) + + self.reactor.advance(10 * 60 * 1000) + + now_token = self.queue.get_current_token(self.instance_name) + + rows, upto_token, limited = self.get_success( + self.queue.get_replication_rows("master", prev_token, now_token, 10) + ) + self.assertEqual(upto_token, now_token) + self.assertFalse(limited) + self.assertCountEqual(rows, []) + + prev_token = self.queue.get_current_token(self.instance_name) + + self.queue.send_presence_to_destinations((state1, state2), ("dest1", "dest2")) + self.queue.send_presence_to_destinations((state3,), ("dest3",)) + + now_token = self.queue.get_current_token(self.instance_name) + + rows, upto_token, limited = self.get_success( + self.queue.get_replication_rows("master", prev_token, now_token, 10) + ) + self.assertEqual(upto_token, now_token) + self.assertFalse(limited) + + expected_rows = [ + (3, ("dest1", "@user1:test")), + (3, ("dest2", "@user1:test")), + (3, ("dest1", "@user2:test")), + (3, ("dest2", "@user2:test")), + (4, ("dest3", "@user3:test")), + ] + + self.assertCountEqual(rows, expected_rows) + + def test_partially_clear_queue(self): + state1 = UserPresenceState.default("@user1:test") + state2 = UserPresenceState.default("@user2:test") + state3 = UserPresenceState.default("@user3:test") + + prev_token = self.queue.get_current_token(self.instance_name) + + self.queue.send_presence_to_destinations((state1, state2), ("dest1", "dest2")) + + self.reactor.advance(2 * 60 * 1000) + + self.queue.send_presence_to_destinations((state3,), ("dest3",)) + + self.reactor.advance(4 * 60 * 1000) + + now_token = self.queue.get_current_token(self.instance_name) + + rows, upto_token, limited = self.get_success( + self.queue.get_replication_rows("master", prev_token, now_token, 10) + ) + self.assertEqual(upto_token, now_token) + self.assertFalse(limited) + + expected_rows = [ + (2, ("dest3", "@user3:test")), + ] + self.assertCountEqual(rows, []) + + prev_token = self.queue.get_current_token(self.instance_name) + + self.queue.send_presence_to_destinations((state1, state2), ("dest1", "dest2")) + self.queue.send_presence_to_destinations((state3,), ("dest3",)) + + now_token = self.queue.get_current_token(self.instance_name) + + rows, upto_token, limited = self.get_success( + self.queue.get_replication_rows("master", prev_token, now_token, 10) + ) + self.assertEqual(upto_token, now_token) + self.assertFalse(limited) + + expected_rows = [ + (3, ("dest1", "@user1:test")), + (3, ("dest2", "@user1:test")), + (3, ("dest1", "@user2:test")), + (3, ("dest2", "@user2:test")), + (4, ("dest3", "@user3:test")), + ] + + self.assertCountEqual(rows, expected_rows) + + class PresenceJoinTestCase(unittest.HomeserverTestCase): """Tests remote servers get told about presence of users in the room when they join and when new local users join. 
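The new `PresenceFederationQueueTestCase` above exercises the worker-mode presence queue: producers stamp outbound presence with a monotonically increasing token, and replication consumers page through `(prev_token, now_token]`, getting a `limited` flag when a page is truncated. The toy queue below (not Synapse's implementation) sketches just that paging contract:

```python
# Toy token-paged queue, for illustration only.
from typing import List, Tuple


class ToyPresenceQueue:
    def __init__(self) -> None:
        self._rows: List[Tuple[int, str]] = []  # (token, payload)
        self._token = 0

    def get_current_token(self) -> int:
        return self._token

    def send(self, payload: str) -> None:
        self._token += 1
        self._rows.append((self._token, payload))

    def get_replication_rows(
        self, from_token: int, upto_token: int, limit: int
    ) -> Tuple[List[Tuple[int, str]], int, bool]:
        matched = [r for r in self._rows if from_token < r[0] <= upto_token]
        limited = len(matched) > limit
        rows = matched[:limit]
        # If the page was truncated, report how far we actually got so the
        # caller can continue from there.
        return rows, (rows[-1][0] if limited else upto_token), limited


q = ToyPresenceQueue()
prev_token = q.get_current_token()
q.send("('dest1', '@user1:test')")
q.send("('dest3', '@user3:test')")
now_token = q.get_current_token()

rows, upto_token, limited = q.get_replication_rows(prev_token, now_token, 10)
assert upto_token == now_token and not limited and len(rows) == 2
```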
@@ -483,10 +667,17 @@ class PresenceJoinTestCase(unittest.HomeserverTestCase): def make_homeserver(self, reactor, clock): hs = self.setup_test_homeserver( - "server", federation_http_client=None, federation_sender=Mock() + "server", + federation_http_client=None, + federation_sender=Mock(spec=FederationSender), ) return hs + def default_config(self): + config = super().default_config() + config["send_federation"] = True + return config + def prepare(self, reactor, clock, hs): self.federation_sender = hs.get_federation_sender() self.event_builder_factory = hs.get_event_builder_factory() @@ -530,9 +721,6 @@ class PresenceJoinTestCase(unittest.HomeserverTestCase): # Add a new remote server to the room self._add_new_user(room_id, "@alice:server2") - # We shouldn't have sent out any local presence *updates* - self.federation_sender.send_presence.assert_not_called() - # When new server is joined we send it the local users presence states. # We expect to only see user @test2:server, as @test:server is offline # and has a zero last_active_ts @@ -551,7 +739,6 @@ class PresenceJoinTestCase(unittest.HomeserverTestCase): self.federation_sender.reset_mock() self._add_new_user(room_id, "@bob:server3") - self.federation_sender.send_presence.assert_not_called() self.federation_sender.send_presence_to_destinations.assert_called_once_with( destinations=["server3"], states={expected_state} ) @@ -596,9 +783,6 @@ class PresenceJoinTestCase(unittest.HomeserverTestCase): self.reactor.pump([0]) # Wait for presence updates to be handled - # We shouldn't have sent out any local presence *updates* - self.federation_sender.send_presence.assert_not_called() - # We expect to only send test2 presence to server2 and server3 expected_state = self.get_success( self.presence_handler.current_state_for_user("@test2:server") diff --git a/tests/handlers/test_profile.py b/tests/handlers/test_profile.py index 60f2458c98..8834d21f0d 100644 --- a/tests/handlers/test_profile.py +++ b/tests/handlers/test_profile.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2014-2016 OpenMarket Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/handlers/test_register.py b/tests/handlers/test_register.py index c30b414d99..2812d7e8f5 100644 --- a/tests/handlers/test_register.py +++ b/tests/handlers/test_register.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2015, 2016 OpenMarket Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/handlers/test_stats.py b/tests/handlers/test_stats.py index 0229f58315..69cc38b01e 100644 --- a/tests/handlers/test_stats.py +++ b/tests/handlers/test_stats.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2019 New Vector Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/handlers/test_sync.py b/tests/handlers/test_sync.py index 8e950f25c5..c8b43305f4 100644 --- a/tests/handlers/test_sync.py +++ b/tests/handlers/test_sync.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2018 New Vector Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/handlers/test_typing.py b/tests/handlers/test_typing.py index 9fa231a37a..0c89487eaf 100644 --- a/tests/handlers/test_typing.py +++ b/tests/handlers/test_typing.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2014-2016 OpenMarket Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/handlers/test_user_directory.py b/tests/handlers/test_user_directory.py index 
67a8e49945..453686cc81 100644 --- a/tests/handlers/test_user_directory.py +++ b/tests/handlers/test_user_directory.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2018 New Vector # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/http/__init__.py b/tests/http/__init__.py index 3e5a856584..e74f7f5b48 100644 --- a/tests/http/__init__.py +++ b/tests/http/__init__.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2019 New Vector Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/http/federation/__init__.py b/tests/http/federation/__init__.py index 1453d04571..743fb9904a 100644 --- a/tests/http/federation/__init__.py +++ b/tests/http/federation/__init__.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2019 New Vector Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/http/federation/test_matrix_federation_agent.py b/tests/http/federation/test_matrix_federation_agent.py index e6b20799e5..4f355154d0 100644 --- a/tests/http/federation/test_matrix_federation_agent.py +++ b/tests/http/federation/test_matrix_federation_agent.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2019 New Vector Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/http/federation/test_srv_resolver.py b/tests/http/federation/test_srv_resolver.py index 466ce722d9..c49be33b9f 100644 --- a/tests/http/federation/test_srv_resolver.py +++ b/tests/http/federation/test_srv_resolver.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2014-2016 OpenMarket Ltd # Copyright 2019 New Vector Ltd # diff --git a/tests/http/test_additional_resource.py b/tests/http/test_additional_resource.py index 453391a5a5..768c2ba4ea 100644 --- a/tests/http/test_additional_resource.py +++ b/tests/http/test_additional_resource.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2018 New Vector Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/http/test_endpoint.py b/tests/http/test_endpoint.py index d06ea518ce..1f9a2f9b1d 100644 --- a/tests/http/test_endpoint.py +++ b/tests/http/test_endpoint.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2018 New Vector Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/http/test_fedclient.py b/tests/http/test_fedclient.py index 21c1297171..ed9a884d76 100644 --- a/tests/http/test_fedclient.py +++ b/tests/http/test_fedclient.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2018 New Vector Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -27,6 +26,7 @@ from twisted.web.http import HTTPChannel from synapse.api.errors import RequestSendFailed from synapse.http.matrixfederationclient import ( + MAX_RESPONSE_SIZE, MatrixFederationHttpClient, MatrixFederationRequest, ) @@ -561,3 +561,61 @@ class FederationClientTests(HomeserverTestCase): f = self.failureResultOf(test_d) self.assertIsInstance(f.value, RequestSendFailed) + + def test_too_big(self): + """ + Test what happens if a huge response is returned from the remote endpoint. 
+ """ + + test_d = defer.ensureDeferred(self.cl.get_json("testserv:8008", "foo/bar")) + + self.pump() + + # Nothing happened yet + self.assertNoResult(test_d) + + # Make sure treq is trying to connect + clients = self.reactor.tcpClients + self.assertEqual(len(clients), 1) + (host, port, factory, _timeout, _bindAddress) = clients[0] + self.assertEqual(host, "1.2.3.4") + self.assertEqual(port, 8008) + + # complete the connection and wire it up to a fake transport + protocol = factory.buildProtocol(None) + transport = StringTransport() + protocol.makeConnection(transport) + + # that should have made it send the request to the transport + self.assertRegex(transport.value(), b"^GET /foo/bar") + self.assertRegex(transport.value(), b"Host: testserv:8008") + + # Deferred is still without a result + self.assertNoResult(test_d) + + # Send it a huge HTTP response + protocol.dataReceived( + b"HTTP/1.1 200 OK\r\n" + b"Server: Fake\r\n" + b"Content-Type: application/json\r\n" + b"\r\n" + ) + + self.pump() + + # should still be waiting + self.assertNoResult(test_d) + + sent = 0 + chunk_size = 1024 * 512 + while not test_d.called: + protocol.dataReceived(b"a" * chunk_size) + sent += chunk_size + self.assertLessEqual(sent, MAX_RESPONSE_SIZE) + + self.assertEqual(sent, MAX_RESPONSE_SIZE) + + f = self.failureResultOf(test_d) + self.assertIsInstance(f.value, RequestSendFailed) + + self.assertTrue(transport.disconnecting) diff --git a/tests/http/test_proxyagent.py b/tests/http/test_proxyagent.py index 3ea8b5bec7..fefc8099c9 100644 --- a/tests/http/test_proxyagent.py +++ b/tests/http/test_proxyagent.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2019 The Matrix.org Foundation C.I.C. # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/http/test_servlet.py b/tests/http/test_servlet.py index f979c96f7c..a80bfb9f4e 100644 --- a/tests/http/test_servlet.py +++ b/tests/http/test_servlet.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2020 The Matrix.org Foundation C.I.C. # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/http/test_simple_client.py b/tests/http/test_simple_client.py index cc4cae320d..c85a3665c1 100644 --- a/tests/http/test_simple_client.py +++ b/tests/http/test_simple_client.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2020 The Matrix.org Foundation C.I.C. # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/http/test_site.py b/tests/http/test_site.py new file mode 100644 index 0000000000..8c13b4f693 --- /dev/null +++ b/tests/http/test_site.py @@ -0,0 +1,83 @@ +# Copyright 2021 The Matrix.org Foundation C.I.C. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from twisted.internet.address import IPv6Address +from twisted.test.proto_helpers import StringTransport + +from synapse.app.homeserver import SynapseHomeServer + +from tests.unittest import HomeserverTestCase + + +class SynapseRequestTestCase(HomeserverTestCase): + def make_homeserver(self, reactor, clock): + return self.setup_test_homeserver(homeserver_to_use=SynapseHomeServer) + + def test_large_request(self): + """overlarge HTTP requests should be rejected""" + self.hs.start_listening() + + # find the HTTP server which is configured to listen on port 0 + (port, factory, _backlog, interface) = self.reactor.tcpServers[0] + self.assertEqual(interface, "::") + self.assertEqual(port, 0) + + # as a control case, first send a regular request. + + # complete the connection and wire it up to a fake transport + client_address = IPv6Address("TCP", "::1", "2345") + protocol = factory.buildProtocol(client_address) + transport = StringTransport() + protocol.makeConnection(transport) + + protocol.dataReceived( + b"POST / HTTP/1.1\r\n" + b"Connection: close\r\n" + b"Transfer-Encoding: chunked\r\n" + b"\r\n" + b"0\r\n" + b"\r\n" + ) + + while not transport.disconnecting: + self.reactor.advance(1) + + # we should get a 404 + self.assertRegex(transport.value().decode(), r"^HTTP/1\.1 404 ") + + # now send an oversized request + protocol = factory.buildProtocol(client_address) + transport = StringTransport() + protocol.makeConnection(transport) + + protocol.dataReceived( + b"POST / HTTP/1.1\r\n" + b"Connection: close\r\n" + b"Transfer-Encoding: chunked\r\n" + b"\r\n" + ) + + # we deliberately send all the data in one big chunk, to ensure that + # twisted isn't buffering the data in the chunked transfer decoder. + # we start with the chunk size, in hex. (We won't actually send this much) + protocol.dataReceived(b"10000000\r\n") + sent = 0 + while not transport.disconnected: + self.assertLess(sent, 0x10000000, "connection did not drop") + protocol.dataReceived(b"\0" * 1024) + sent += 1024 + + # default max upload size is 50M, so it should drop on the next buffer after + # that. + self.assertEqual(sent, 50 * 1024 * 1024 + 1024) diff --git a/tests/logging/__init__.py b/tests/logging/__init__.py index a58d51441c..1acf5666a8 100644 --- a/tests/logging/__init__.py +++ b/tests/logging/__init__.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2019 The Matrix.org Foundation C.I.C. # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/logging/test_remote_handler.py b/tests/logging/test_remote_handler.py index 4bc27a1d7d..b0d046fe00 100644 --- a/tests/logging/test_remote_handler.py +++ b/tests/logging/test_remote_handler.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2019 The Matrix.org Foundation C.I.C. # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/logging/test_terse_json.py b/tests/logging/test_terse_json.py index ecf873e2ab..1160716929 100644 --- a/tests/logging/test_terse_json.py +++ b/tests/logging/test_terse_json.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2019 The Matrix.org Foundation C.I.C. # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/module_api/test_api.py b/tests/module_api/test_api.py index 349f93560e..742ad14b8c 100644 --- a/tests/module_api/test_api.py +++ b/tests/module_api/test_api.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2020 The Matrix.org Foundation C.I.C. 
# # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/push/test_email.py b/tests/push/test_email.py index 941cf42429..e04bc5c9a6 100644 --- a/tests/push/test_email.py +++ b/tests/push/test_email.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2018 New Vector # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/push/test_http.py b/tests/push/test_http.py index f590e8d21c..9df8cbc945 100644 --- a/tests/push/test_http.py +++ b/tests/push/test_http.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2018 New Vector # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/push/test_push_rule_evaluator.py b/tests/push/test_push_rule_evaluator.py index 4a841f5bb8..45906ce720 100644 --- a/tests/push/test_push_rule_evaluator.py +++ b/tests/push/test_push_rule_evaluator.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2020 The Matrix.org Foundation C.I.C. # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/replication/__init__.py b/tests/replication/__init__.py index b7df13c9ee..f43a360a80 100644 --- a/tests/replication/__init__.py +++ b/tests/replication/__init__.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2016 OpenMarket Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/replication/_base.py b/tests/replication/_base.py index aff19d9fb3..624bd1b927 100644 --- a/tests/replication/_base.py +++ b/tests/replication/_base.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2019 New Vector Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -13,22 +12,16 @@ # See the License for the specific language governing permissions and # limitations under the License. 
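For context: the new `tests/http/test_site.py` above expects the server to drop the connection once an inbound request body grows past the configured limit, and (as the hunks below show) `SynapseSite` now takes a `max_request_body_size` argument. A minimal sketch of that idea with plain twisted.web follows; `SizeLimitingRequest` and `MAX_REQUEST_BODY_SIZE` are invented names, and this is not Synapse's actual `SynapseRequest`.

from twisted.web import server

MAX_REQUEST_BODY_SIZE = 50 * 1024 * 1024  # 50M, mirroring the limit the test expects


class SizeLimitingRequest(server.Request):
    _dropped = False

    def _drop(self):
        # A real implementation might abort the TCP connection outright rather
        # than waiting for a clean close.
        self._dropped = True
        self.channel.transport.loseConnection()

    def gotLength(self, length):
        # Called once the headers are in; `length` is the Content-Length, or
        # None for chunked transfer encoding.
        super().gotLength(length)
        if length is not None and length > MAX_REQUEST_BODY_SIZE:
            self._drop()

    def handleContentChunk(self, data):
        # Called for every chunk of body data, so this also covers chunked
        # uploads that declare no Content-Length.
        if self._dropped:
            return
        if self.content.tell() + len(data) > MAX_REQUEST_BODY_SIZE:
            self._drop()
            return
        super().handleContentChunk(data)


# Usage sketch: wire the request class into an ordinary Site.
# site = server.Site(my_root_resource)  # my_root_resource: any twisted.web resource
# site.requestFactory = SizeLimitingRequest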
import logging -from typing import Any, Callable, Dict, List, Optional, Tuple, Type +from typing import Any, Callable, Dict, List, Optional, Tuple -from twisted.internet.interfaces import IConsumer, IPullProducer, IReactorTime from twisted.internet.protocol import Protocol -from twisted.internet.task import LoopingCall -from twisted.web.http import HTTPChannel from twisted.web.resource import Resource -from twisted.web.server import Request, Site -from synapse.app.generic_worker import ( - GenericWorkerReplicationHandler, - GenericWorkerServer, -) +from synapse.app.generic_worker import GenericWorkerServer from synapse.http.server import JsonResource from synapse.http.site import SynapseRequest, SynapseSite from synapse.replication.http import ReplicationRestResource +from synapse.replication.tcp.client import ReplicationDataHandler from synapse.replication.tcp.handler import ReplicationCommandHandler from synapse.replication.tcp.protocol import ClientReplicationStreamProtocol from synapse.replication.tcp.resource import ( @@ -36,7 +29,6 @@ from synapse.replication.tcp.resource import ( ServerReplicationStreamProtocol, ) from synapse.server import HomeServer -from synapse.util import Clock from tests import unittest from tests.server import FakeTransport @@ -157,7 +149,19 @@ class BaseStreamTestCase(unittest.HomeserverTestCase): client_protocol = client_factory.buildProtocol(None) # Set up the server side protocol - channel = _PushHTTPChannel(self.reactor, SynapseRequest, self.site) + channel = self.site.buildProtocol(None) + + # hook into the channel's request factory so that we can keep a record + # of the requests + requests: List[SynapseRequest] = [] + real_request_factory = channel.requestFactory + + def request_factory(*args, **kwargs): + request = real_request_factory(*args, **kwargs) + requests.append(request) + return request + + channel.requestFactory = request_factory # Connect client to server and vice versa. client_to_server_transport = FakeTransport( @@ -179,7 +183,10 @@ class BaseStreamTestCase(unittest.HomeserverTestCase): server_to_client_transport.loseConnection() client_to_server_transport.loseConnection() - return channel.request + # there should have been exactly one request + self.assertEqual(len(requests), 1) + + return requests[0] def assert_request_is_get_repl_stream_updates( self, request: SynapseRequest, stream_name: str @@ -352,6 +359,8 @@ class BaseMultiWorkerStreamTestCase(unittest.HomeserverTestCase): config=worker_hs.config.server.listeners[0], resource=resource, server_version_string="1", + max_request_body_size=4096, + reactor=self.reactor, ) if worker_hs.config.redis.redis_enabled: @@ -389,7 +398,7 @@ class BaseMultiWorkerStreamTestCase(unittest.HomeserverTestCase): client_protocol = client_factory.buildProtocol(None) # Set up the server side protocol - channel = _PushHTTPChannel(self.reactor, SynapseRequest, self._hs_to_site[hs]) + channel = self._hs_to_site[hs].buildProtocol(None) # Connect client to server and vice versa. 
client_to_server_transport = FakeTransport( @@ -432,7 +441,7 @@ class BaseMultiWorkerStreamTestCase(unittest.HomeserverTestCase): server_protocol.makeConnection(server_to_client_transport) -class TestReplicationDataHandler(GenericWorkerReplicationHandler): +class TestReplicationDataHandler(ReplicationDataHandler): """Drop-in for ReplicationDataHandler which just collects RDATA rows""" def __init__(self, hs: HomeServer): @@ -447,112 +456,6 @@ class TestReplicationDataHandler(GenericWorkerReplicationHandler): self.received_rdata_rows.append((stream_name, token, r)) -class _PushHTTPChannel(HTTPChannel): - """A HTTPChannel that wraps pull producers to push producers. - - This is a hack to get around the fact that HTTPChannel transparently wraps a - pull producer (which is what Synapse uses to reply to requests) with - `_PullToPush` to convert it to a push producer. Unfortunately `_PullToPush` - uses the standard reactor rather than letting us use our test reactor, which - makes it very hard to test. - """ - - def __init__( - self, reactor: IReactorTime, request_factory: Type[Request], site: Site - ): - super().__init__() - self.reactor = reactor - self.requestFactory = request_factory - self.site = site - - self._pull_to_push_producer = None # type: Optional[_PullToPushProducer] - - def registerProducer(self, producer, streaming): - # Convert pull producers to push producer. - if not streaming: - self._pull_to_push_producer = _PullToPushProducer( - self.reactor, producer, self - ) - producer = self._pull_to_push_producer - - super().registerProducer(producer, True) - - def unregisterProducer(self): - if self._pull_to_push_producer: - # We need to manually stop the _PullToPushProducer. - self._pull_to_push_producer.stop() - - def checkPersistence(self, request, version): - """Check whether the connection can be re-used""" - # We hijack this to always say no for ease of wiring stuff up in - # `handle_http_replication_attempt`. - request.responseHeaders.setRawHeaders(b"connection", [b"close"]) - return False - - def requestDone(self, request): - # Store the request for inspection. - self.request = request - super().requestDone(request) - - -class _PullToPushProducer: - """A push producer that wraps a pull producer.""" - - def __init__( - self, reactor: IReactorTime, producer: IPullProducer, consumer: IConsumer - ): - self._clock = Clock(reactor) - self._producer = producer - self._consumer = consumer - - # While running we use a looping call with a zero delay to call - # resumeProducing on given producer. - self._looping_call = None # type: Optional[LoopingCall] - - # We start writing next reactor tick. - self._start_loop() - - def _start_loop(self): - """Start the looping call to""" - - if not self._looping_call: - # Start a looping call which runs every tick. 
- self._looping_call = self._clock.looping_call(self._run_once, 0) - - def stop(self): - """Stops calling resumeProducing.""" - if self._looping_call: - self._looping_call.stop() - self._looping_call = None - - def pauseProducing(self): - """Implements IPushProducer""" - self.stop() - - def resumeProducing(self): - """Implements IPushProducer""" - self._start_loop() - - def stopProducing(self): - """Implements IPushProducer""" - self.stop() - self._producer.stopProducing() - - def _run_once(self): - """Calls resumeProducing on producer once.""" - - try: - self._producer.resumeProducing() - except Exception: - logger.exception("Failed to call resumeProducing") - try: - self._consumer.unregisterProducer() - except Exception: - pass - - self.stopProducing() - - class FakeRedisPubSubServer: """A fake Redis server for pub/sub.""" diff --git a/tests/replication/slave/__init__.py b/tests/replication/slave/__init__.py index b7df13c9ee..f43a360a80 100644 --- a/tests/replication/slave/__init__.py +++ b/tests/replication/slave/__init__.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2016 OpenMarket Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/replication/slave/storage/__init__.py b/tests/replication/slave/storage/__init__.py index b7df13c9ee..f43a360a80 100644 --- a/tests/replication/slave/storage/__init__.py +++ b/tests/replication/slave/storage/__init__.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2016 OpenMarket Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/replication/tcp/__init__.py b/tests/replication/tcp/__init__.py index 1453d04571..743fb9904a 100644 --- a/tests/replication/tcp/__init__.py +++ b/tests/replication/tcp/__init__.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2019 New Vector Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/replication/tcp/streams/__init__.py b/tests/replication/tcp/streams/__init__.py index 1453d04571..743fb9904a 100644 --- a/tests/replication/tcp/streams/__init__.py +++ b/tests/replication/tcp/streams/__init__.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2019 New Vector Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/replication/tcp/streams/test_account_data.py b/tests/replication/tcp/streams/test_account_data.py index 153634d4ee..cdd052001b 100644 --- a/tests/replication/tcp/streams/test_account_data.py +++ b/tests/replication/tcp/streams/test_account_data.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2020 The Matrix.org Foundation C.I.C. 
# # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/replication/tcp/streams/test_events.py b/tests/replication/tcp/streams/test_events.py index 77856fc304..f51fa0a79e 100644 --- a/tests/replication/tcp/streams/test_events.py +++ b/tests/replication/tcp/streams/test_events.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2019 New Vector Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -240,7 +239,7 @@ class EventsStreamTestCase(BaseStreamTestCase): # the state rows are unsorted state_rows = [] # type: List[EventsStreamCurrentStateRow] - for stream_name, token, row in received_rows: + for stream_name, _, row in received_rows: self.assertEqual("events", stream_name) self.assertIsInstance(row, EventsStreamRow) self.assertEqual(row.type, "state") @@ -357,7 +356,7 @@ class EventsStreamTestCase(BaseStreamTestCase): # the state rows are unsorted state_rows = [] # type: List[EventsStreamCurrentStateRow] - for j in range(STATES_PER_USER + 1): + for _ in range(STATES_PER_USER + 1): stream_name, token, row = received_rows.pop(0) self.assertEqual("events", stream_name) self.assertIsInstance(row, EventsStreamRow) diff --git a/tests/replication/tcp/streams/test_federation.py b/tests/replication/tcp/streams/test_federation.py index aa4bf1c7e3..ffec06a0d6 100644 --- a/tests/replication/tcp/streams/test_federation.py +++ b/tests/replication/tcp/streams/test_federation.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2019 New Vector Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/replication/tcp/streams/test_receipts.py b/tests/replication/tcp/streams/test_receipts.py index 7d848e41ff..7f5d932f0b 100644 --- a/tests/replication/tcp/streams/test_receipts.py +++ b/tests/replication/tcp/streams/test_receipts.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2019 New Vector Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/replication/tcp/streams/test_typing.py b/tests/replication/tcp/streams/test_typing.py index 4a0b342264..ecd360c2d0 100644 --- a/tests/replication/tcp/streams/test_typing.py +++ b/tests/replication/tcp/streams/test_typing.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2020 The Matrix.org Foundation C.I.C. # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/replication/tcp/test_commands.py b/tests/replication/tcp/test_commands.py index 60c10a441a..cca7ebb719 100644 --- a/tests/replication/tcp/test_commands.py +++ b/tests/replication/tcp/test_commands.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2020 The Matrix.org Foundation C.I.C. # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/replication/tcp/test_remote_server_up.py b/tests/replication/tcp/test_remote_server_up.py index 1fe9d5b4d0..262c35cef3 100644 --- a/tests/replication/tcp/test_remote_server_up.py +++ b/tests/replication/tcp/test_remote_server_up.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2020 The Matrix.org Foundation C.I.C. # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/replication/test_auth.py b/tests/replication/test_auth.py index f8fd8a843c..1346e0e160 100644 --- a/tests/replication/test_auth.py +++ b/tests/replication/test_auth.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2020 The Matrix.org Foundation C.I.C. 
# # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/replication/test_client_reader_shard.py b/tests/replication/test_client_reader_shard.py index 5da1d5dc4d..b9751efdc5 100644 --- a/tests/replication/test_client_reader_shard.py +++ b/tests/replication/test_client_reader_shard.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2020 The Matrix.org Foundation C.I.C. # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/replication/test_federation_ack.py b/tests/replication/test_federation_ack.py index 44ad5eec57..04a869e295 100644 --- a/tests/replication/test_federation_ack.py +++ b/tests/replication/test_federation_ack.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2020 The Matrix.org Foundation C.I.C. # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/replication/test_federation_sender_shard.py b/tests/replication/test_federation_sender_shard.py index 8ca595c3ee..48ab3aa4e3 100644 --- a/tests/replication/test_federation_sender_shard.py +++ b/tests/replication/test_federation_sender_shard.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2020 The Matrix.org Foundation C.I.C. # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/replication/test_multi_media_repo.py b/tests/replication/test_multi_media_repo.py index b0800f9840..76e6644353 100644 --- a/tests/replication/test_multi_media_repo.py +++ b/tests/replication/test_multi_media_repo.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2020 The Matrix.org Foundation C.I.C. # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/replication/test_pusher_shard.py b/tests/replication/test_pusher_shard.py index 1f12bde1aa..1e4e3821b9 100644 --- a/tests/replication/test_pusher_shard.py +++ b/tests/replication/test_pusher_shard.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2020 The Matrix.org Foundation C.I.C. # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/replication/test_sharded_event_persister.py b/tests/replication/test_sharded_event_persister.py index 6c2e1674cb..d739eb6b17 100644 --- a/tests/replication/test_sharded_event_persister.py +++ b/tests/replication/test_sharded_event_persister.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2020 The Matrix.org Foundation C.I.C. 
# # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/rest/__init__.py b/tests/rest/__init__.py index fe0ac3f8e9..629e2df74a 100644 --- a/tests/rest/__init__.py +++ b/tests/rest/__init__.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2015, 2016 OpenMarket Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/rest/admin/__init__.py b/tests/rest/admin/__init__.py index 1453d04571..743fb9904a 100644 --- a/tests/rest/admin/__init__.py +++ b/tests/rest/admin/__init__.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2019 New Vector Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/rest/admin/test_admin.py b/tests/rest/admin/test_admin.py index 4abcbe3f55..2f7090e554 100644 --- a/tests/rest/admin/test_admin.py +++ b/tests/rest/admin/test_admin.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2018 New Vector Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/rest/admin/test_device.py b/tests/rest/admin/test_device.py index 2a1bcf1760..120730b764 100644 --- a/tests/rest/admin/test_device.py +++ b/tests/rest/admin/test_device.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2020 Dirk Klimpel # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -431,7 +430,7 @@ class DevicesRestTestCase(unittest.HomeserverTestCase): """ # Create devices number_devices = 5 - for n in range(number_devices): + for _ in range(number_devices): self.login("user", "pass") # Get devices @@ -548,7 +547,7 @@ class DeleteDevicesRestTestCase(unittest.HomeserverTestCase): # Create devices number_devices = 5 - for n in range(number_devices): + for _ in range(number_devices): self.login("user", "pass") # Get devices diff --git a/tests/rest/admin/test_event_reports.py b/tests/rest/admin/test_event_reports.py index e30ffe4fa0..29341bc6e9 100644 --- a/tests/rest/admin/test_event_reports.py +++ b/tests/rest/admin/test_event_reports.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2020 Dirk Klimpel # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -49,22 +48,22 @@ class EventReportsTestCase(unittest.HomeserverTestCase): self.helper.join(self.room_id2, user=self.admin_user, tok=self.admin_user_tok) # Two rooms and two users. 
Every user sends and reports every room event - for i in range(5): + for _ in range(5): self._create_event_and_report( room_id=self.room_id1, user_tok=self.other_user_tok, ) - for i in range(5): + for _ in range(5): self._create_event_and_report( room_id=self.room_id2, user_tok=self.other_user_tok, ) - for i in range(5): + for _ in range(5): self._create_event_and_report( room_id=self.room_id1, user_tok=self.admin_user_tok, ) - for i in range(5): + for _ in range(5): self._create_event_and_report( room_id=self.room_id2, user_tok=self.admin_user_tok, diff --git a/tests/rest/admin/test_media.py b/tests/rest/admin/test_media.py index 31db472cd3..ac7b219700 100644 --- a/tests/rest/admin/test_media.py +++ b/tests/rest/admin/test_media.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2020 Dirk Klimpel # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/rest/admin/test_room.py b/tests/rest/admin/test_room.py index 85f77c0a65..6b84188120 100644 --- a/tests/rest/admin/test_room.py +++ b/tests/rest/admin/test_room.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2020 Dirk Klimpel # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -616,7 +615,7 @@ class RoomTestCase(unittest.HomeserverTestCase): # Create 3 test rooms total_rooms = 3 room_ids = [] - for x in range(total_rooms): + for _ in range(total_rooms): room_id = self.helper.create_room_as( self.admin_user, tok=self.admin_user_tok ) @@ -680,7 +679,7 @@ class RoomTestCase(unittest.HomeserverTestCase): # Create 5 test rooms total_rooms = 5 room_ids = [] - for x in range(total_rooms): + for _ in range(total_rooms): room_id = self.helper.create_room_as( self.admin_user, tok=self.admin_user_tok ) @@ -1578,7 +1577,7 @@ class JoinAliasRoomTestCase(unittest.HomeserverTestCase): channel.json_body["event"]["event_id"], events[midway]["event_id"] ) - for i, found_event in enumerate(channel.json_body["events_before"]): + for found_event in channel.json_body["events_before"]: for j, posted_event in enumerate(events): if found_event["event_id"] == posted_event["event_id"]: self.assertTrue(j < midway) @@ -1586,7 +1585,7 @@ class JoinAliasRoomTestCase(unittest.HomeserverTestCase): else: self.fail("Event %s from events_before not found" % j) - for i, found_event in enumerate(channel.json_body["events_after"]): + for found_event in channel.json_body["events_after"]: for j, posted_event in enumerate(events): if found_event["event_id"] == posted_event["event_id"]: self.assertTrue(j > midway) diff --git a/tests/rest/admin/test_statistics.py b/tests/rest/admin/test_statistics.py index 1f1d11f527..79cac4266b 100644 --- a/tests/rest/admin/test_statistics.py +++ b/tests/rest/admin/test_statistics.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2020 Dirk Klimpel # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -468,7 +467,7 @@ class UserMediaStatisticsTestCase(unittest.HomeserverTestCase): number_media: Number of media to be created for the user """ upload_resource = self.media_repo.children[b"upload"] - for i in range(number_media): + for _ in range(number_media): # file size is 67 Byte image_data = unhexlify( b"89504e470d0a1a0a0000000d4948445200000001000000010806" diff --git a/tests/rest/admin/test_user.py b/tests/rest/admin/test_user.py index a7b600a1d4..11681d030b 100644 --- a/tests/rest/admin/test_user.py +++ b/tests/rest/admin/test_user.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2018 New Vector Ltd # # Licensed under the Apache License, Version 2.0 (the 
"License"); @@ -19,7 +18,7 @@ import json import urllib.parse from binascii import unhexlify from typing import List, Optional -from unittest.mock import Mock +from unittest.mock import Mock, patch import synapse.rest.admin from synapse.api.constants import UserTypes @@ -55,8 +54,6 @@ class UserRegisterTestCase(unittest.HomeserverTestCase): self.datastore = Mock(return_value=Mock()) self.datastore.get_current_state_deltas = Mock(return_value=(0, [])) - self.secrets = Mock() - self.hs = self.setup_test_homeserver() self.hs.config.registration_shared_secret = "shared" @@ -85,14 +82,13 @@ class UserRegisterTestCase(unittest.HomeserverTestCase): Calling GET on the endpoint will return a randomised nonce, using the homeserver's secrets provider. """ - secrets = Mock() - secrets.token_hex = Mock(return_value="abcd") - - self.hs.get_secrets = Mock(return_value=secrets) + with patch("secrets.token_hex") as token_hex: + # Patch secrets.token_hex for the duration of this context + token_hex.return_value = "abcd" - channel = self.make_request("GET", self.url) + channel = self.make_request("GET", self.url) - self.assertEqual(channel.json_body, {"nonce": "abcd"}) + self.assertEqual(channel.json_body, {"nonce": "abcd"}) def test_expired_nonce(self): """ @@ -1953,7 +1949,7 @@ class UserMembershipRestTestCase(unittest.HomeserverTestCase): # Create rooms and join other_user_tok = self.login("user", "pass") number_rooms = 5 - for n in range(number_rooms): + for _ in range(number_rooms): self.helper.create_room_as(self.other_user, tok=other_user_tok) # Get rooms @@ -2533,7 +2529,7 @@ class UserMediaRestTestCase(unittest.HomeserverTestCase): user_token: Access token of the user number_media: Number of media to be created for the user """ - for i in range(number_media): + for _ in range(number_media): # file size is 67 Byte image_data = unhexlify( b"89504e470d0a1a0a0000000d4948445200000001000000010806" diff --git a/tests/rest/client/__init__.py b/tests/rest/client/__init__.py index fe0ac3f8e9..629e2df74a 100644 --- a/tests/rest/client/__init__.py +++ b/tests/rest/client/__init__.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2015, 2016 OpenMarket Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/rest/client/test_consent.py b/tests/rest/client/test_consent.py index c74693e9b2..5cc62a910a 100644 --- a/tests/rest/client/test_consent.py +++ b/tests/rest/client/test_consent.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2018 New Vector # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/rest/client/test_ephemeral_message.py b/tests/rest/client/test_ephemeral_message.py index 56937dcd2e..eec0fc01f9 100644 --- a/tests/rest/client/test_ephemeral_message.py +++ b/tests/rest/client/test_ephemeral_message.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2019 New Vector Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/rest/client/test_identity.py b/tests/rest/client/test_identity.py index 61bdae0879..f9fb6938e9 100644 --- a/tests/rest/client/test_identity.py +++ b/tests/rest/client/test_identity.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2019 New Vector Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/rest/client/test_power_levels.py b/tests/rest/client/test_power_levels.py index 5256c11fe6..ba5ad47df5 100644 --- a/tests/rest/client/test_power_levels.py +++ b/tests/rest/client/test_power_levels.py @@ -1,4 +1,3 @@ -# -*- coding: 
utf-8 -*- # Copyright 2020 The Matrix.org Foundation C.I.C. # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/rest/client/test_redactions.py b/tests/rest/client/test_redactions.py index e0c74591b6..dfd85221d0 100644 --- a/tests/rest/client/test_redactions.py +++ b/tests/rest/client/test_redactions.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2019 The Matrix.org Foundation C.I.C. # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/rest/client/test_retention.py b/tests/rest/client/test_retention.py index be1211dbce..b06c4d4ad5 100644 --- a/tests/rest/client/test_retention.py +++ b/tests/rest/client/test_retention.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2019 New Vector Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/rest/client/test_third_party_rules.py b/tests/rest/client/test_third_party_rules.py index a7ebe0c3e9..e1fe72fc5d 100644 --- a/tests/rest/client/test_third_party_rules.py +++ b/tests/rest/client/test_third_party_rules.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2019 The Matrix.org Foundation C.I.C. # # Licensed under the Apache License, Version 2.0 (the 'License'); diff --git a/tests/rest/client/v1/__init__.py b/tests/rest/client/v1/__init__.py index bfebb0f644..5e83dba2ed 100644 --- a/tests/rest/client/v1/__init__.py +++ b/tests/rest/client/v1/__init__.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2014-2016 OpenMarket Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/rest/client/v1/test_directory.py b/tests/rest/client/v1/test_directory.py index edd1d184f8..8ed470490b 100644 --- a/tests/rest/client/v1/test_directory.py +++ b/tests/rest/client/v1/test_directory.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2019 New Vector Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/rest/client/v1/test_events.py b/tests/rest/client/v1/test_events.py index 87a18d2cb9..852bda408c 100644 --- a/tests/rest/client/v1/test_events.py +++ b/tests/rest/client/v1/test_events.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2014-2016 OpenMarket Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/rest/client/v1/test_login.py b/tests/rest/client/v1/test_login.py index c7b79ab8a7..605b952316 100644 --- a/tests/rest/client/v1/test_login.py +++ b/tests/rest/client/v1/test_login.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2019-2021 The Matrix.org Foundation C.I.C. 
# # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/rest/client/v1/test_presence.py b/tests/rest/client/v1/test_presence.py index c136827f79..409f3949dc 100644 --- a/tests/rest/client/v1/test_presence.py +++ b/tests/rest/client/v1/test_presence.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2018 New Vector Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -17,6 +16,7 @@ from unittest.mock import Mock from twisted.internet import defer +from synapse.handlers.presence import PresenceHandler from synapse.rest.client.v1 import presence from synapse.types import UserID @@ -33,7 +33,7 @@ class PresenceTestCase(unittest.HomeserverTestCase): def make_homeserver(self, reactor, clock): - presence_handler = Mock() + presence_handler = Mock(spec=PresenceHandler) presence_handler.set_state.return_value = defer.succeed(None) hs = self.setup_test_homeserver( @@ -60,12 +60,12 @@ class PresenceTestCase(unittest.HomeserverTestCase): self.assertEqual(channel.code, 200) self.assertEqual(self.hs.get_presence_handler().set_state.call_count, 1) + @unittest.override_config({"use_presence": False}) def test_put_presence_disabled(self): """ PUT to the status endpoint with use_presence disabled will NOT call set_state on the presence handler. """ - self.hs.config.use_presence = False body = {"presence": "here", "status_msg": "beep boop"} channel = self.make_request( diff --git a/tests/rest/client/v1/test_profile.py b/tests/rest/client/v1/test_profile.py index f3448c94dd..165ad33fb7 100644 --- a/tests/rest/client/v1/test_profile.py +++ b/tests/rest/client/v1/test_profile.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2014-2016 OpenMarket Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/rest/client/v1/test_push_rule_attrs.py b/tests/rest/client/v1/test_push_rule_attrs.py index 2bc512d75e..d077616082 100644 --- a/tests/rest/client/v1/test_push_rule_attrs.py +++ b/tests/rest/client/v1/test_push_rule_attrs.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2020 The Matrix.org Foundation C.I.C. # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/rest/client/v1/test_rooms.py b/tests/rest/client/v1/test_rooms.py index 4df20c90fd..a3694f3d02 100644 --- a/tests/rest/client/v1/test_rooms.py +++ b/tests/rest/client/v1/test_rooms.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2014-2016 OpenMarket Ltd # Copyright 2017 Vector Creations Ltd # Copyright 2018-2019 New Vector Ltd @@ -647,7 +646,7 @@ class RoomInviteRatelimitTestCase(RoomBase): def test_invites_by_users_ratelimit(self): """Tests that invites to a specific user are actually rate-limited.""" - for i in range(3): + for _ in range(3): room_id = self.helper.create_room_as(self.user_id) self.helper.invite(room_id, self.user_id, "@other-users:red") @@ -669,7 +668,7 @@ class RoomJoinRatelimitTestCase(RoomBase): ) def test_join_local_ratelimit(self): """Tests that local joins are actually rate-limited.""" - for i in range(3): + for _ in range(3): self.helper.create_room_as(self.user_id) self.helper.create_room_as(self.user_id, expect_code=429) @@ -734,7 +733,7 @@ class RoomJoinRatelimitTestCase(RoomBase): for path in paths_to_test: # Make sure we send more requests than the rate-limiting config would allow # if all of these requests ended up joining the user to a room. 
- for i in range(4): + for _ in range(4): channel = self.make_request("POST", path % room_id, {}) self.assertEquals(channel.code, 200) diff --git a/tests/rest/client/v1/test_typing.py b/tests/rest/client/v1/test_typing.py index 0b8f565121..0aad48a162 100644 --- a/tests/rest/client/v1/test_typing.py +++ b/tests/rest/client/v1/test_typing.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2014-2016 OpenMarket Ltd # Copyright 2018 New Vector # diff --git a/tests/rest/client/v1/utils.py b/tests/rest/client/v1/utils.py index a6a292b20c..ed55a640af 100644 --- a/tests/rest/client/v1/utils.py +++ b/tests/rest/client/v1/utils.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2014-2016 OpenMarket Ltd # Copyright 2017 Vector Creations Ltd # Copyright 2018-2019 New Vector Ltd diff --git a/tests/rest/client/v2_alpha/test_account.py b/tests/rest/client/v2_alpha/test_account.py index e72b61963d..4ef19145d1 100644 --- a/tests/rest/client/v2_alpha/test_account.py +++ b/tests/rest/client/v2_alpha/test_account.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2015-2016 OpenMarket Ltd # Copyright 2017-2018 New Vector Ltd # Copyright 2019 The Matrix.org Foundation C.I.C. diff --git a/tests/rest/client/v2_alpha/test_auth.py b/tests/rest/client/v2_alpha/test_auth.py index ed433d9333..485e3650c3 100644 --- a/tests/rest/client/v2_alpha/test_auth.py +++ b/tests/rest/client/v2_alpha/test_auth.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2018 New Vector # Copyright 2020-2021 The Matrix.org Foundation C.I.C # diff --git a/tests/rest/client/v2_alpha/test_capabilities.py b/tests/rest/client/v2_alpha/test_capabilities.py index 287a1a485c..874052c61c 100644 --- a/tests/rest/client/v2_alpha/test_capabilities.py +++ b/tests/rest/client/v2_alpha/test_capabilities.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2019 New Vector Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/rest/client/v2_alpha/test_filter.py b/tests/rest/client/v2_alpha/test_filter.py index f761c44936..c7e47725b7 100644 --- a/tests/rest/client/v2_alpha/test_filter.py +++ b/tests/rest/client/v2_alpha/test_filter.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2015, 2016 OpenMarket Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/rest/client/v2_alpha/test_password_policy.py b/tests/rest/client/v2_alpha/test_password_policy.py index 5ebc5707a5..6f07ff6cbb 100644 --- a/tests/rest/client/v2_alpha/test_password_policy.py +++ b/tests/rest/client/v2_alpha/test_password_policy.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2019 The Matrix.org Foundation C.I.C. # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/rest/client/v2_alpha/test_register.py b/tests/rest/client/v2_alpha/test_register.py index 41e52c701f..33f063ad09 100644 --- a/tests/rest/client/v2_alpha/test_register.py +++ b/tests/rest/client/v2_alpha/test_register.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2014-2016 OpenMarket Ltd # Copyright 2017-2018 New Vector Ltd # Copyright 2019 The Matrix.org Foundation C.I.C. 
@@ -310,6 +309,57 @@ class RegisterRestServletTestCase(unittest.HomeserverTestCase): self.assertIsNotNone(channel.json_body.get("sid")) + @unittest.override_config( + { + "public_baseurl": "https://test_server", + "email": { + "smtp_host": "mail_server", + "smtp_port": 2525, + "notif_from": "sender@host", + }, + } + ) + def test_reject_invalid_email(self): + """Check that bad emails are rejected""" + + # Test for email with multiple @ + channel = self.make_request( + "POST", + b"register/email/requestToken", + {"client_secret": "foobar", "email": "email@@email", "send_attempt": 1}, + ) + self.assertEquals(400, channel.code, channel.result) + # Check error to ensure that we're not erroring due to a bug in the test. + self.assertEquals( + channel.json_body, + {"errcode": "M_UNKNOWN", "error": "Unable to parse email address"}, + ) + + # Test for email with no @ + channel = self.make_request( + "POST", + b"register/email/requestToken", + {"client_secret": "foobar", "email": "email", "send_attempt": 1}, + ) + self.assertEquals(400, channel.code, channel.result) + self.assertEquals( + channel.json_body, + {"errcode": "M_UNKNOWN", "error": "Unable to parse email address"}, + ) + + # Test for super long email + email = "a@" + "a" * 1000 + channel = self.make_request( + "POST", + b"register/email/requestToken", + {"client_secret": "foobar", "email": email, "send_attempt": 1}, + ) + self.assertEquals(400, channel.code, channel.result) + self.assertEquals( + channel.json_body, + {"errcode": "M_UNKNOWN", "error": "Unable to parse email address"}, + ) + class RegisterHideProfileTestCase(unittest.HomeserverTestCase): @@ -740,7 +790,7 @@ class AccountValidityRenewalByEmailTestCase(unittest.HomeserverTestCase): # Check that the HTML we're getting is the one we expect on a successful renewal. expiration_ts = self.get_success(self.store.get_expiration_ts_for_user(user_id)) - expected_html = self.hs.config.account_validity_account_renewed_template.render( + expected_html = self.hs.config.account_validity.account_validity_account_renewed_template.render( expiration_ts=expiration_ts ) self.assertEqual( @@ -758,10 +808,8 @@ class AccountValidityRenewalByEmailTestCase(unittest.HomeserverTestCase): # Check that the HTML we're getting is the one we expect when reusing a # token. The account expiration date should not have changed. - expected_html = ( - self.hs.config.account_validity_account_previously_renewed_template.render( - expiration_ts=expiration_ts - ) + expected_html = self.hs.config.account_validity.account_validity_account_previously_renewed_template.render( + expiration_ts=expiration_ts ) self.assertEqual( channel.result["body"], expected_html.encode("utf8"), channel.result @@ -787,7 +835,9 @@ class AccountValidityRenewalByEmailTestCase(unittest.HomeserverTestCase): # Check that the HTML we're getting is the one we expect when using an # invalid/unknown token. 
- expected_html = self.hs.config.account_validity_invalid_token_template.render() + expected_html = ( + self.hs.config.account_validity.account_validity_invalid_token_template.render() + ) self.assertEqual( channel.result["body"], expected_html.encode("utf8"), channel.result ) diff --git a/tests/rest/client/v2_alpha/test_relations.py b/tests/rest/client/v2_alpha/test_relations.py index 21ee436b91..856aa8682f 100644 --- a/tests/rest/client/v2_alpha/test_relations.py +++ b/tests/rest/client/v2_alpha/test_relations.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2019 New Vector Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/rest/client/v2_alpha/test_shared_rooms.py b/tests/rest/client/v2_alpha/test_shared_rooms.py index dd83a1f8ff..cedb9614a8 100644 --- a/tests/rest/client/v2_alpha/test_shared_rooms.py +++ b/tests/rest/client/v2_alpha/test_shared_rooms.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2020 Half-Shot # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/rest/client/v2_alpha/test_sync.py b/tests/rest/client/v2_alpha/test_sync.py index 8755bfb38a..fdab520c5f 100644 --- a/tests/rest/client/v2_alpha/test_sync.py +++ b/tests/rest/client/v2_alpha/test_sync.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2018-2019 New Vector Ltd # Copyright 2019 The Matrix.org Foundation C.I.C. # diff --git a/tests/rest/client/v2_alpha/test_upgrade_room.py b/tests/rest/client/v2_alpha/test_upgrade_room.py index d890d11863..5f3f15fc57 100644 --- a/tests/rest/client/v2_alpha/test_upgrade_room.py +++ b/tests/rest/client/v2_alpha/test_upgrade_room.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2021 The Matrix.org Foundation C.I.C. # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/rest/key/v2/test_remote_key_resource.py b/tests/rest/key/v2/test_remote_key_resource.py index eb8687ce68..3b275bc23b 100644 --- a/tests/rest/key/v2/test_remote_key_resource.py +++ b/tests/rest/key/v2/test_remote_key_resource.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2020 The Matrix.org Foundation C.I.C. 
# # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/rest/media/__init__.py b/tests/rest/media/__init__.py index a354d38ca8..b1ee10cfcc 100644 --- a/tests/rest/media/__init__.py +++ b/tests/rest/media/__init__.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2018 New Vector Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/rest/media/v1/__init__.py b/tests/rest/media/v1/__init__.py index a354d38ca8..b1ee10cfcc 100644 --- a/tests/rest/media/v1/__init__.py +++ b/tests/rest/media/v1/__init__.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2018 New Vector Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/rest/media/v1/test_base.py b/tests/rest/media/v1/test_base.py index ebd7869208..f761e23f1b 100644 --- a/tests/rest/media/v1/test_base.py +++ b/tests/rest/media/v1/test_base.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2019 New Vector Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/rest/media/v1/test_media_storage.py b/tests/rest/media/v1/test_media_storage.py index 375f0b7977..4a213d13dd 100644 --- a/tests/rest/media/v1/test_media_storage.py +++ b/tests/rest/media/v1/test_media_storage.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2018 New Vector Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/rest/media/v1/test_url_preview.py b/tests/rest/media/v1/test_url_preview.py index 9067463e54..d3ef7bb4c6 100644 --- a/tests/rest/media/v1/test_url_preview.py +++ b/tests/rest/media/v1/test_url_preview.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2018 New Vector Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/rest/test_health.py b/tests/rest/test_health.py index 32acd93dc1..01d48c3860 100644 --- a/tests/rest/test_health.py +++ b/tests/rest/test_health.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2020 The Matrix.org Foundation C.I.C. 
# # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/rest/test_well_known.py b/tests/rest/test_well_known.py index 14de0921be..ac0e427752 100644 --- a/tests/rest/test_well_known.py +++ b/tests/rest/test_well_known.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2018 New Vector # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/scripts/test_new_matrix_user.py b/tests/scripts/test_new_matrix_user.py index 885b95a51f..6f3c365c9a 100644 --- a/tests/scripts/test_new_matrix_user.py +++ b/tests/scripts/test_new_matrix_user.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2018 New Vector # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/server.py b/tests/server.py index b535a5d886..9df8cda24f 100644 --- a/tests/server.py +++ b/tests/server.py @@ -603,12 +603,6 @@ class FakeTransport: if self.disconnected: return - if not hasattr(self.other, "transport"): - # the other has no transport yet; reschedule - if self.autoflush: - self._reactor.callLater(0.0, self.flush) - return - if maxbytes is not None: to_write = self.buffer[:maxbytes] else: diff --git a/tests/server_notices/test_consent.py b/tests/server_notices/test_consent.py index 4dd5a36178..ac98259b7e 100644 --- a/tests/server_notices/test_consent.py +++ b/tests/server_notices/test_consent.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2018 New Vector Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/server_notices/test_resource_limits_server_notices.py b/tests/server_notices/test_resource_limits_server_notices.py index 450b4ec710..d46521ccdc 100644 --- a/tests/server_notices/test_resource_limits_server_notices.py +++ b/tests/server_notices/test_resource_limits_server_notices.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2018, 2019 New Vector Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/state/test_v2.py b/tests/state/test_v2.py index 66e3cafe8e..43fc79ca74 100644 --- a/tests/state/test_v2.py +++ b/tests/state/test_v2.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2018 New Vector Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/storage/test__base.py b/tests/storage/test__base.py index 1ac4ebc61d..200b9198f9 100644 --- a/tests/storage/test__base.py +++ b/tests/storage/test__base.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2015, 2016 OpenMarket Ltd # Copyright 2019 New Vector Ltd # @@ -14,6 +13,7 @@ # See the License for the specific language governing permissions and # limitations under the License. +import secrets from tests import unittest @@ -22,7 +22,7 @@ class UpsertManyTests(unittest.HomeserverTestCase): def prepare(self, reactor, clock, hs): self.storage = hs.get_datastore() - self.table_name = "table_" + hs.get_secrets().token_hex(6) + self.table_name = "table_" + secrets.token_hex(6) self.get_success( self.storage.db_pool.runInteraction( "create", diff --git a/tests/storage/test_account_data.py b/tests/storage/test_account_data.py index 38444e48e2..01af49a16b 100644 --- a/tests/storage/test_account_data.py +++ b/tests/storage/test_account_data.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2021 The Matrix.org Foundation C.I.C. 
# # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/storage/test_appservice.py b/tests/storage/test_appservice.py index e755a4db62..666bffe257 100644 --- a/tests/storage/test_appservice.py +++ b/tests/storage/test_appservice.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2015, 2016 OpenMarket Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/storage/test_base.py b/tests/storage/test_base.py index 54e9e7f6fe..3b45a7efd8 100644 --- a/tests/storage/test_base.py +++ b/tests/storage/test_base.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2014-2016 OpenMarket Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/storage/test_cleanup_extrems.py b/tests/storage/test_cleanup_extrems.py index b02fb32ced..aa20588bbe 100644 --- a/tests/storage/test_cleanup_extrems.py +++ b/tests/storage/test_cleanup_extrems.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2019 The Matrix.org Foundation C.I.C. # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/storage/test_client_ips.py b/tests/storage/test_client_ips.py index f7f75320ba..e57fce9694 100644 --- a/tests/storage/test_client_ips.py +++ b/tests/storage/test_client_ips.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2016 OpenMarket Ltd # Copyright 2018 New Vector Ltd # diff --git a/tests/storage/test_database.py b/tests/storage/test_database.py index a906d30e73..6fbac0ab14 100644 --- a/tests/storage/test_database.py +++ b/tests/storage/test_database.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2020 The Matrix.org Foundation C.I.C. # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/storage/test_devices.py b/tests/storage/test_devices.py index ef4cf8d0f1..6790aa5242 100644 --- a/tests/storage/test_devices.py +++ b/tests/storage/test_devices.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2016-2021 The Matrix.org Foundation C.I.C. # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/storage/test_directory.py b/tests/storage/test_directory.py index 0db233fd68..41bef62ca8 100644 --- a/tests/storage/test_directory.py +++ b/tests/storage/test_directory.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2014-2021 The Matrix.org Foundation C.I.C. # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/storage/test_e2e_room_keys.py b/tests/storage/test_e2e_room_keys.py index 3d7760d5d9..9b6b425425 100644 --- a/tests/storage/test_e2e_room_keys.py +++ b/tests/storage/test_e2e_room_keys.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2019 The Matrix.org Foundation C.I.C. # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/storage/test_end_to_end_keys.py b/tests/storage/test_end_to_end_keys.py index 1e54b940fd..3bf6e337f4 100644 --- a/tests/storage/test_end_to_end_keys.py +++ b/tests/storage/test_end_to_end_keys.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2016-2021 The Matrix.org Foundation C.I.C. # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/storage/test_event_chain.py b/tests/storage/test_event_chain.py index 16daa66cc9..d87f124c26 100644 --- a/tests/storage/test_event_chain.py +++ b/tests/storage/test_event_chain.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2020 The Matrix.org Foundation C.I.C. 
# # Licensed under the Apache License, Version 2.0 (the 'License'); diff --git a/tests/storage/test_event_federation.py b/tests/storage/test_event_federation.py index d597d712d6..a0e2259478 100644 --- a/tests/storage/test_event_federation.py +++ b/tests/storage/test_event_federation.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2018 New Vector Ltd # # Licensed under the Apache License, Version 2.0 (the 'License'); diff --git a/tests/storage/test_event_metrics.py b/tests/storage/test_event_metrics.py index 7691f2d790..088fbb247b 100644 --- a/tests/storage/test_event_metrics.py +++ b/tests/storage/test_event_metrics.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2019 The Matrix.org Foundation C.I.C. # # Licensed under the Apache License, Version 2.0 (the 'License'); @@ -39,12 +38,12 @@ class ExtremStatisticsTestCase(HomeserverTestCase): last_event = None # Make a real event chain - for i in range(event_count): + for _ in range(event_count): ev = self.create_and_send_event(room_id, user, False, last_event) last_event = [ev] # Sprinkle in some extremities - for i in range(extrems): + for _ in range(extrems): ev = self.create_and_send_event(room_id, user, False, last_event) # Let it run for a while, then pull out the statistics from the diff --git a/tests/storage/test_event_push_actions.py b/tests/storage/test_event_push_actions.py index 0289942f88..1930b37eda 100644 --- a/tests/storage/test_event_push_actions.py +++ b/tests/storage/test_event_push_actions.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2016-2021 The Matrix.org Foundation C.I.C. # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/storage/test_events.py b/tests/storage/test_events.py index ed898b8dbb..617bc8091f 100644 --- a/tests/storage/test_events.py +++ b/tests/storage/test_events.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2020 The Matrix.org Foundation C.I.C. # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/storage/test_id_generators.py b/tests/storage/test_id_generators.py index 6c389fe9ac..792b1c44c1 100644 --- a/tests/storage/test_id_generators.py +++ b/tests/storage/test_id_generators.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2020 The Matrix.org Foundation C.I.C. 
# # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/storage/test_keys.py b/tests/storage/test_keys.py index 95f309fbbc..a94b5fd721 100644 --- a/tests/storage/test_keys.py +++ b/tests/storage/test_keys.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2017 Vector Creations Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/storage/test_main.py b/tests/storage/test_main.py index 30e46c650d..3dcf6a22e5 100644 --- a/tests/storage/test_main.py +++ b/tests/storage/test_main.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2020 Awesome Technologies Innovationslabor GmbH # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/storage/test_monthly_active_users.py b/tests/storage/test_monthly_active_users.py index 47556791f4..944dbc34a2 100644 --- a/tests/storage/test_monthly_active_users.py +++ b/tests/storage/test_monthly_active_users.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2018 New Vector # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/storage/test_profile.py b/tests/storage/test_profile.py index c6256fce86..03c473bb3e 100644 --- a/tests/storage/test_profile.py +++ b/tests/storage/test_profile.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2014-2021 The Matrix.org Foundation C.I.C. # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/storage/test_purge.py b/tests/storage/test_purge.py index 41af8c4847..54c5b470c7 100644 --- a/tests/storage/test_purge.py +++ b/tests/storage/test_purge.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2018 New Vector Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/storage/test_redaction.py b/tests/storage/test_redaction.py index 2d2f58903c..bb31ab756d 100644 --- a/tests/storage/test_redaction.py +++ b/tests/storage/test_redaction.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2014-2021 The Matrix.org Foundation C.I.C. # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/storage/test_registration.py b/tests/storage/test_registration.py index c82cf15bc2..9748065282 100644 --- a/tests/storage/test_registration.py +++ b/tests/storage/test_registration.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2014-2021 The Matrix.org Foundation C.I.C. # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/storage/test_room.py b/tests/storage/test_room.py index 0089d33c93..70257bf210 100644 --- a/tests/storage/test_room.py +++ b/tests/storage/test_room.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2014-2021 The Matrix.org Foundation C.I.C. # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/storage/test_roommember.py b/tests/storage/test_roommember.py index d2aed66f6d..9fa968f6bb 100644 --- a/tests/storage/test_roommember.py +++ b/tests/storage/test_roommember.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2014-2016 OpenMarket Ltd # Copyright 2019 The Matrix.org Foundation C.I.C. # diff --git a/tests/storage/test_state.py b/tests/storage/test_state.py index f06b452fa9..8695264595 100644 --- a/tests/storage/test_state.py +++ b/tests/storage/test_state.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2018-2021 The Matrix.org Foundation C.I.C. 
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/tests/storage/test_transactions.py b/tests/storage/test_transactions.py index 8e817e2c7f..b7f7eae8d0 100644 --- a/tests/storage/test_transactions.py +++ b/tests/storage/test_transactions.py @@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
 # Copyright 2018 New Vector Ltd
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/tests/storage/test_user_directory.py b/tests/storage/test_user_directory.py index 019c5b7b14..222e5d129d 100644 --- a/tests/storage/test_user_directory.py +++ b/tests/storage/test_user_directory.py @@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
 # Copyright 2018-2021 The Matrix.org Foundation C.I.C.
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/tests/test_distributor.py b/tests/test_distributor.py index 6a6cf709f6..f8341041ee 100644 --- a/tests/test_distributor.py +++ b/tests/test_distributor.py @@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
 # Copyright 2014-2016 OpenMarket Ltd
 # Copyright 2018 New Vector Ltd
 #
diff --git a/tests/test_event_auth.py b/tests/test_event_auth.py index b5f18344dc..88888319cc 100644 --- a/tests/test_event_auth.py +++ b/tests/test_event_auth.py @@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
 # Copyright 2018 New Vector Ltd
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/tests/test_federation.py b/tests/test_federation.py index 382cedbd5d..0ed8326f55 100644 --- a/tests/test_federation.py +++ b/tests/test_federation.py @@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
 # Copyright 2020 The Matrix.org Foundation C.I.C.
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -76,8 +75,10 @@ class MessageAcceptTests(unittest.HomeserverTestCase):
         )

         self.handler = self.homeserver.get_federation_handler()
-        self.handler.do_auth = lambda origin, event, context, auth_events: succeed(
-            context
+        self.handler._check_event_auth = (
+            lambda origin, event, context, state, auth_events, backfilled: succeed(
+                context
+            )
         )
         self.client = self.homeserver.get_federation_client()
         self.client._check_sigs_and_hash_and_fetch = lambda dest, pdus, **k: succeed(
diff --git a/tests/test_mau.py b/tests/test_mau.py index 7d92a16a8d..fa6ef92b3b 100644 --- a/tests/test_mau.py +++ b/tests/test_mau.py @@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
 # Copyright 2018 New Vector Ltd
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/tests/test_metrics.py b/tests/test_metrics.py index f696fcf89e..b4574b2ffe 100644 --- a/tests/test_metrics.py +++ b/tests/test_metrics.py @@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
 # Copyright 2018 New Vector Ltd
 # Copyright 2019 Matrix.org Foundation C.I.C.
 #
diff --git a/tests/test_phone_home.py b/tests/test_phone_home.py index 0f800a075b..09707a74d7 100644 --- a/tests/test_phone_home.py +++ b/tests/test_phone_home.py @@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
 # Copyright 2019 Matrix.org Foundation C.I.C.
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/tests/test_preview.py b/tests/test_preview.py index ea83299918..cac3d81ac1 100644 --- a/tests/test_preview.py +++ b/tests/test_preview.py @@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
 # Copyright 2014-2016 OpenMarket Ltd
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/tests/test_server.py b/tests/test_server.py index 55cde7f62f..407e172e41 100644 --- a/tests/test_server.py +++ b/tests/test_server.py @@ -202,6 +202,8 @@ class OptionsResourceTests(unittest.TestCase):
             parse_listener_def({"type": "http", "port": 0}),
             self.resource,
             "1.0",
+            max_request_body_size=1234,
+            reactor=self.reactor,
         )

         # render the request and return the channel
diff --git a/tests/test_state.py b/tests/test_state.py index 0d626f49f6..62f7095873 100644 --- a/tests/test_state.py +++ b/tests/test_state.py @@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
 # Copyright 2014-2016 OpenMarket Ltd
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/tests/test_test_utils.py b/tests/test_test_utils.py index b921ac52c0..f2ef1c6051 100644 --- a/tests/test_test_utils.py +++ b/tests/test_test_utils.py @@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
 # Copyright 2014-2016 OpenMarket Ltd
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/tests/test_types.py b/tests/test_types.py index 67ceea6e43..3ab1f15eb8 100644 --- a/tests/test_types.py +++ b/tests/test_types.py @@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
 # Copyright 2014-2016 OpenMarket Ltd
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/tests/test_utils/__init__.py b/tests/test_utils/__init__.py index b557ffd692..be6302d170 100644 --- a/tests/test_utils/__init__.py +++ b/tests/test_utils/__init__.py @@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
 # Copyright 2019 New Vector Ltd
 # Copyright 2020 The Matrix.org Foundation C.I.C
 #
diff --git a/tests/test_utils/event_injection.py b/tests/test_utils/event_injection.py index 3dfbf8f8a9..e9ec9e085b 100644 --- a/tests/test_utils/event_injection.py +++ b/tests/test_utils/event_injection.py @@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
 # Copyright 2018 New Vector Ltd
 # Copyright 2020 The Matrix.org Foundation C.I.C
 #
diff --git a/tests/test_utils/html_parsers.py b/tests/test_utils/html_parsers.py index ad563eb3f0..1fbb38f4be 100644 --- a/tests/test_utils/html_parsers.py +++ b/tests/test_utils/html_parsers.py @@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
 # Copyright 2021 The Matrix.org Foundation C.I.C.
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/tests/test_utils/logging_setup.py b/tests/test_utils/logging_setup.py index 74568b34f8..51a197a8c6 100644 --- a/tests/test_utils/logging_setup.py +++ b/tests/test_utils/logging_setup.py @@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
 # Copyright 2019 New Vector Ltd
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/tests/test_visibility.py b/tests/test_visibility.py index e502ac197e..94b19788d7 100644 --- a/tests/test_visibility.py +++ b/tests/test_visibility.py @@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
 # Copyright 2018 New Vector Ltd
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/tests/unittest.py b/tests/unittest.py index 92764434bd..74db7c08f1 100644 --- a/tests/unittest.py +++ b/tests/unittest.py @@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
 # Copyright 2014-2016 OpenMarket Ltd
 # Copyright 2018 New Vector
 # Copyright 2019 Matrix.org Federation C.I.C
@@ -19,6 +18,7 @@ import hashlib
 import hmac
 import inspect
 import logging
+import secrets
 import time
 from typing import Callable, Dict, Iterable, Optional, Tuple, Type, TypeVar, Union
 from unittest.mock import Mock, patch
@@ -134,7 +134,7 @@ class TestCase(unittest.TestCase):
     def assertObjectHasAttributes(self, attrs, obj):
         """Asserts that the given object has each of the attributes given, and
         that the value of each matches according to assertEquals."""
-        for (key, value) in attrs.items():
+        for key in attrs.keys():
             if not hasattr(obj, key):
                 raise AssertionError("Expected obj to have a '.%s'" % key)
             try:
@@ -248,6 +248,8 @@ class HomeserverTestCase(TestCase):
             config=self.hs.config.server.listeners[0],
             resource=self.resource,
             server_version_string="1",
+            max_request_body_size=1234,
+            reactor=self.reactor,
         )

         from tests.rest.client.v1.utils import RestHelper
@@ -625,7 +627,6 @@ class HomeserverTestCase(TestCase):
             str: The new event's ID.
         """
         event_creator = self.hs.get_event_creation_handler()
-        secrets = self.hs.get_secrets()
         requester = create_requester(user)

         event, context = self.get_success(
diff --git a/tests/util/__init__.py b/tests/util/__init__.py index bfebb0f644..5e83dba2ed 100644 --- a/tests/util/__init__.py +++ b/tests/util/__init__.py @@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
 # Copyright 2014-2016 OpenMarket Ltd
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/tests/util/caches/__init__.py b/tests/util/caches/__init__.py index 451dae3b6c..830e2dfe91 100644 --- a/tests/util/caches/__init__.py +++ b/tests/util/caches/__init__.py @@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
 # Copyright 2017 Vector Creations Ltd
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/tests/util/caches/test_cached_call.py b/tests/util/caches/test_cached_call.py index f349b5ced0..80b97167ba 100644 --- a/tests/util/caches/test_cached_call.py +++ b/tests/util/caches/test_cached_call.py @@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
 # Copyright 2021 The Matrix.org Foundation C.I.C.
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/tests/util/caches/test_deferred_cache.py b/tests/util/caches/test_deferred_cache.py index c24c33ee91..54a88a8325 100644 --- a/tests/util/caches/test_deferred_cache.py +++ b/tests/util/caches/test_deferred_cache.py @@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
 # Copyright 2020 The Matrix.org Foundation C.I.C.
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/tests/util/caches/test_descriptors.py b/tests/util/caches/test_descriptors.py index 8c082e7432..178ac8a68c 100644 --- a/tests/util/caches/test_descriptors.py +++ b/tests/util/caches/test_descriptors.py @@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
 # Copyright 2016 OpenMarket Ltd
 # Copyright 2018 New Vector Ltd
 #
diff --git a/tests/util/caches/test_ttlcache.py b/tests/util/caches/test_ttlcache.py index 23018081e5..fe8314057d 100644 --- a/tests/util/caches/test_ttlcache.py +++ b/tests/util/caches/test_ttlcache.py @@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
 # Copyright 2019 New Vector Ltd
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/tests/util/test_async_utils.py b/tests/util/test_async_utils.py index 17fd86d02d..069f875962 100644 --- a/tests/util/test_async_utils.py +++ b/tests/util/test_async_utils.py @@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
 # Copyright 2019 New Vector Ltd
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/tests/util/test_dict_cache.py b/tests/util/test_dict_cache.py index 2f41333f4c..bee66dee43 100644 --- a/tests/util/test_dict_cache.py +++ b/tests/util/test_dict_cache.py @@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
 # Copyright 2015, 2016 OpenMarket Ltd
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/tests/util/test_expiring_cache.py b/tests/util/test_expiring_cache.py index 49ffeebd0e..e6e13ba06c 100644 --- a/tests/util/test_expiring_cache.py +++ b/tests/util/test_expiring_cache.py @@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
 # Copyright 2017 OpenMarket Ltd
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/tests/util/test_file_consumer.py b/tests/util/test_file_consumer.py index d1372f6bc2..3bb4695405 100644 --- a/tests/util/test_file_consumer.py +++ b/tests/util/test_file_consumer.py @@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
 # Copyright 2018 New Vector Ltd
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/tests/util/test_itertools.py b/tests/util/test_itertools.py index e931a7ec18..1bd0b45d94 100644 --- a/tests/util/test_itertools.py +++ b/tests/util/test_itertools.py @@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
 # Copyright 2020 The Matrix.org Foundation C.I.C.
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/tests/util/test_linearizer.py b/tests/util/test_linearizer.py index 0e52811948..c4a3917b23 100644 --- a/tests/util/test_linearizer.py +++ b/tests/util/test_linearizer.py @@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
 # Copyright 2016 OpenMarket Ltd
 # Copyright 2018 New Vector Ltd
 #
diff --git a/tests/util/test_logformatter.py b/tests/util/test_logformatter.py index 0fb60caacb..a2e08281e6 100644 --- a/tests/util/test_logformatter.py +++ b/tests/util/test_logformatter.py @@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
 # Copyright 2018 New Vector Ltd
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/tests/util/test_lrucache.py b/tests/util/test_lrucache.py index ce4f1cc30a..df3e27779f 100644 --- a/tests/util/test_lrucache.py +++ b/tests/util/test_lrucache.py @@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
 # Copyright 2015, 2016 OpenMarket Ltd
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/tests/util/test_ratelimitutils.py b/tests/util/test_ratelimitutils.py index 3fed55090a..34aaffe859 100644 --- a/tests/util/test_ratelimitutils.py +++ b/tests/util/test_ratelimitutils.py @@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
 # Copyright 2019 The Matrix.org Foundation C.I.C.
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/tests/util/test_retryutils.py b/tests/util/test_retryutils.py index 5f46ed0cef..9b2be83a43 100644 --- a/tests/util/test_retryutils.py +++ b/tests/util/test_retryutils.py @@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
 # Copyright 2019 The Matrix.org Foundation C.I.C.
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/tests/util/test_rwlock.py b/tests/util/test_rwlock.py index d3dea3b52a..a10071c70f 100644 --- a/tests/util/test_rwlock.py +++ b/tests/util/test_rwlock.py @@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
 # Copyright 2016 OpenMarket Ltd
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/tests/util/test_stringutils.py b/tests/util/test_stringutils.py index 8491f7cc83..f7fecd9cf3 100644 --- a/tests/util/test_stringutils.py +++ b/tests/util/test_stringutils.py @@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
 # Copyright 2020 The Matrix.org Foundation C.I.C.
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/tests/util/test_threepids.py b/tests/util/test_threepids.py index 5513724d87..d957b953bb 100644 --- a/tests/util/test_threepids.py +++ b/tests/util/test_threepids.py @@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
 # Copyright 2020 Dirk Klimpel
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/tests/util/test_treecache.py b/tests/util/test_treecache.py index a5f2261208..3b077af27e 100644 --- a/tests/util/test_treecache.py +++ b/tests/util/test_treecache.py @@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
 # Copyright 2015, 2016 OpenMarket Ltd
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/tests/util/test_wheel_timer.py b/tests/util/test_wheel_timer.py index 03201a4d9b..0d5039de04 100644 --- a/tests/util/test_wheel_timer.py +++ b/tests/util/test_wheel_timer.py @@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
 # Copyright 2016 OpenMarket Ltd
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/tests/utils.py b/tests/utils.py index 65d7ad58d9..93478b2b0a 100644 --- a/tests/utils.py +++ b/tests/utils.py @@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
 # Copyright 2014-2016 OpenMarket Ltd
 # Copyright 2018-2019 New Vector Ltd
 #
@@ -310,7 +309,7 @@ def setup_test_homeserver(
             # database for a few more seconds due to flakiness, preventing
             # us from dropping it when the test is over. If we can't drop
             # it, warn and move on.
-            for x in range(5):
+            for _ in range(5):
                 try:
                     cur.execute("DROP DATABASE IF EXISTS %s;" % (test_db,))
                     db_conn.commit()
diff --git a/tox.ini b/tox.ini index 8e27efaebe..3edd150df7 100644 --- a/tox.ini +++ b/tox.ini @@ -21,13 +21,11 @@ deps =
     # installed on that).
     #
     # anyway, make sure that we have a recent enough setuptools.
-    setuptools>=18.5 ; python_version >= '3.6'
-    setuptools>=18.5,<51.0.0 ; python_version < '3.6'
+    setuptools>=18.5

     # we also need a semi-recent version of pip, because old ones fail to
     # install the "enum34" dependency of cryptography.
-    pip>=10 ; python_version >= '3.6'
-    pip>=10,<21.0 ; python_version < '3.6'
+    pip>=10

 # directories/files we run the linters on.
 # if you update this list, make sure to do the same in scripts-dev/lint.sh
@@ -168,8 +166,7 @@ skip_install = true
 usedevelop = false
 deps =
     coverage
-    pip>=10 ; python_version >= '3.6'
-    pip>=10,<21.0 ; python_version < '3.6'
+    pip>=10
 commands=
     coverage combine
     coverage report