diff options
143 files changed, 3700 insertions, 218 deletions
diff --git a/.ci/docker-compose-env b/.ci/docker-compose-env new file mode 100644 index 0000000000..85b102d07f --- /dev/null +++ b/.ci/docker-compose-env @@ -0,0 +1,13 @@ +CI +BUILDKITE +BUILDKITE_BUILD_NUMBER +BUILDKITE_BRANCH +BUILDKITE_BUILD_NUMBER +BUILDKITE_JOB_ID +BUILDKITE_BUILD_URL +BUILDKITE_PROJECT_SLUG +BUILDKITE_COMMIT +BUILDKITE_PULL_REQUEST +BUILDKITE_TAG +CODECOV_TOKEN +TRIAL_FLAGS diff --git a/.ci/docker-compose.yaml b/.ci/docker-compose.yaml new file mode 100644 index 0000000000..73d5ccdd5e --- /dev/null +++ b/.ci/docker-compose.yaml @@ -0,0 +1,23 @@ +version: '3.1' + +services: + + postgres: + image: postgres:${POSTGRES_VERSION?} + environment: + POSTGRES_PASSWORD: postgres + POSTGRES_INITDB_ARGS: "--lc-collate C --lc-ctype C --encoding UTF8" + command: -c fsync=off + + testenv: + image: python:${PYTHON_VERSION?} + depends_on: + - postgres + env_file: docker-compose-env + environment: + SYNAPSE_POSTGRES_HOST: postgres + SYNAPSE_POSTGRES_USER: postgres + SYNAPSE_POSTGRES_PASSWORD: postgres + working_dir: /src + volumes: + - ${BUILDKITE_BUILD_CHECKOUT_PATH}:/src diff --git a/.ci/pipeline.yml b/.ci/pipeline.yml new file mode 100644 index 0000000000..a99d068e96 --- /dev/null +++ b/.ci/pipeline.yml @@ -0,0 +1,497 @@ +# This is just a dummy entry (the `x-yaml-aliases` key is not an official pipeline key, and will be ignored by BuildKite) +# that we use only to store YAML anchors (`&xxx`), that we plan to use and reference later in the YAML file (using `*xxx`) +# without having to copy/paste the same values over and over. +# Note: keys like `agent`, `env`, … used here are totally arbitrary; the only point is to define various separate `&xxx` anchors there. +# +x-yaml-aliases: + commands: + - &trial_setup | + # Install additional packages that are not part of buildpack-deps / python images. 
+ apt-get update && apt-get install -y xmlsec1 + python -m pip install tox + + retry: &retry_setup + automatic: + - exit_status: -1 + limit: 2 + - exit_status: 2 + limit: 2 + +env: + COVERALLS_REPO_TOKEN: wsJWOby6j0uCYFiCes3r0XauxO27mx8lD + +steps: + - label: "\U0001F9F9 Check Style" + command: + - "python -m pip install tox" + - "tox -e check_codestyle" + plugins: + - docker#v3.7.0: + image: "python:3.6" + mount-buildkite-agent: false + + - label: "\U0001F9F9 packaging" + command: + - "python -m pip install tox" + - "tox -e packaging" + plugins: + - docker#v3.7.0: + image: "python:3.6" + mount-buildkite-agent: false + + - label: "\U0001F9F9 isort" + command: + - "python -m pip install tox" + - "tox -e check_isort" + plugins: + - docker#v3.7.0: + image: "python:3.6" + mount-buildkite-agent: false + + - label: "\U0001F9F9 check-sample-config" + command: + - "python -m pip install tox" + - "tox -e check-sampleconfig" + plugins: + - docker#v3.7.0: + image: "python:3.6" + mount-buildkite-agent: false + + - label: "\U0001F5A5 check unix line-endings" + command: + - "scripts-dev/check_line_terminators.sh" + plugins: + - docker#v3.7.0: + image: "python:3.6" + mount-buildkite-agent: false + + - label: ":mypy: mypy" + command: + - "python -m pip install tox" + - "tox -e mypy" + plugins: + - docker#v3.7.0: + image: "python:3.7" + mount-buildkite-agent: false + + - label: ":package: build distribution files" + branches: "release-*" + agents: + queue: ephemeral-small + command: + - python setup.py sdist bdist_wheel + plugins: + - docker#v3.7.0: + image: "python:3.7" + mount-buildkite-agent: false + - artifacts#v1.3.0: + upload: + - dist/* + + - wait + + ################################################################################ + # + # Twisted `trial` tests + # + # Our Intent is to test: + # - All supported Python versions (using SQLite) with current dependencies + # - The oldest and newest supported pairings of Python and PostgreSQL + # + # We also test two special 
cases: + # - The newest supported Python, without any optional dependencies + # - The oldest supported Python, with its oldest supported dependency versions + # + ################################################################################ + + # -- Special Case: Oldest Python w/ Oldest Deps + + # anoa: I've commented this out for DINUM as it was breaking on the 1.31.0 merge + # and it was taking way too long to solve. DINUM aren't even using Python 3.6 + # anyways. +# - label: ":python: 3.6 (Old Deps)" +# command: +# - ".buildkite/scripts/test_old_deps.sh" +# env: +# TRIAL_FLAGS: "-j 2" +# plugins: +# - docker#v3.7.0: +# # We use bionic to get an old python (3.6.5) and sqlite (3.22) +# image: "ubuntu:bionic" +# workdir: "/src" +# mount-buildkite-agent: false +# propagate-environment: true +# - artifacts#v1.3.0: +# upload: [ "_trial_temp/*/*.log" ] +# retry: *retry_setup + + # -- Special Case: Newest Python w/o Optional Deps + + - label: ":python: 3.9 (No Extras)" + command: + - *trial_setup + - "tox -e py39-noextras,combine" + env: + TRIAL_FLAGS: "-j 2" + plugins: + - docker#v3.7.0: + image: "python:3.9" + workdir: "/src" + mount-buildkite-agent: false + propagate-environment: true + - artifacts#v1.3.0: + upload: [ "_trial_temp/*/*.log" ] + retry: *retry_setup + + # -- All Supported Python Versions (SQLite) + + - label: ":python: 3.6" + command: + - *trial_setup + - "tox -e py36,combine" + env: + TRIAL_FLAGS: "-j 2" + plugins: + - docker#v3.7.0: + image: "python:3.6" + workdir: "/src" + mount-buildkite-agent: false + propagate-environment: true + - artifacts#v1.3.0: + upload: [ "_trial_temp/*/*.log" ] + retry: *retry_setup + + - label: ":python: 3.7" + command: + - *trial_setup + - "tox -e py37,combine" + env: + TRIAL_FLAGS: "-j 2" + plugins: + - docker#v3.7.0: + image: "python:3.7" + workdir: "/src" + mount-buildkite-agent: false + propagate-environment: true + - artifacts#v1.3.0: + upload: [ "_trial_temp/*/*.log" ] + retry: *retry_setup + + - label: ":python: 
3.8" + command: + - *trial_setup + - "tox -e py38,combine" + env: + TRIAL_FLAGS: "-j 2" + plugins: + - docker#v3.7.0: + image: "python:3.8" + workdir: "/src" + mount-buildkite-agent: false + propagate-environment: true + - artifacts#v1.3.0: + upload: [ "_trial_temp/*/*.log" ] + retry: *retry_setup + + - label: ":python: 3.9" + command: + - *trial_setup + - "tox -e py39,combine" + env: + TRIAL_FLAGS: "-j 2" + plugins: + - docker#v3.7.0: + image: "python:3.9" + workdir: "/src" + mount-buildkite-agent: false + propagate-environment: true + - artifacts#v1.3.0: + upload: [ "_trial_temp/*/*.log" ] + retry: *retry_setup + + # -- Oldest and Newest Supported Python and Postgres Pairings + + - label: ":python: 3.6 :postgres: 9.6" + agents: + queue: "medium" + env: + TRIAL_FLAGS: "-j 8" + PYTHON_VERSION: "3.6" + POSTGRES_VERSION: "9.6" + command: + - *trial_setup + - "python -m tox -e py36-postgres,combine" + plugins: + - matrix-org/download#v1.1.0: + urls: + - https://raw.githubusercontent.com/matrix-org/pipelines/master/synapse/docker-compose.yaml + - https://raw.githubusercontent.com/matrix-org/pipelines/master/synapse/docker-compose-env + - docker-compose#v3.7.0: + run: testenv + config: + - /tmp/download-${BUILDKITE_BUILD_ID}/docker-compose.yaml + - artifacts#v1.3.0: + upload: [ "_trial_temp/*/*.log" ] + retry: *retry_setup + + - label: ":python: 3.9 :postgres: 13" + agents: + queue: "medium" + env: + TRIAL_FLAGS: "-j 8" + PYTHON_VERSION: "3.9" + POSTGRES_VERSION: "13" + command: + - *trial_setup + - "python -m tox -e py39-postgres,combine" + plugins: + - matrix-org/download#v1.1.0: + urls: + - https://raw.githubusercontent.com/matrix-org/pipelines/master/synapse/docker-compose.yaml + - https://raw.githubusercontent.com/matrix-org/pipelines/master/synapse/docker-compose-env + - docker-compose#v3.7.0: + run: testenv + config: + - /tmp/download-${BUILDKITE_BUILD_ID}/docker-compose.yaml + - artifacts#v1.3.0: + upload: [ "_trial_temp/*/*.log" ] + retry: *retry_setup + + # -- 
Experimentally test against PyPy + # Only runs when the build message includes the string "pypy" + # This step is allowed to fail + + - label: ":python: PyPy3.6" + if: "build.message =~ /pypy/i || build.branch =~ /pypy/i" + soft_fail: true + command: + # No *trial_setup due to docker-library/pypy#52 + - "apt-get update && apt-get install -y xmlsec1" + - "pypy -m pip install tox" + + - "tox -e pypy36,combine" + env: + TRIAL_FLAGS: "-j 2" + plugins: + - docker#v3.7.0: + image: "pypy:3.6" + workdir: "/src" + mount-buildkite-agent: false + propagate-environment: true + - artifacts#v1.3.0: + upload: [ "_trial_temp/*/*.log" ] + retry: *retry_setup + + + ################################################################################ + # + # Sytest + # + # Our tests have three dimensions: + # 1. Topology (Monolith, Workers, Workers w/ Redis) + # 2. Database (SQLite, PostgreSQL) + # 3. Python Version + # + # Tests can run against either a single or multiple PostgreSQL databases. + # This is configured by setting `MULTI_POSTGRES=1` in the environment. + # + # We mostly care about testing each topology. + # For DINSIC specifically, we currently test across one Linux distribution, + # Debian buster (10), which has Python 3.7 and Postgres 11 + # + # TODO: this leaves us without sytests for Postgres 9.6. How much do we care + # about that? 
+ # + # Our intent is to test: + # - Monolith: + # - Older Distro + SQLite + # - Older Python + Older PostgreSQL + # - Newer Python + Newer PostgreSQL + # - Workers: + # - Older Python + Older PostgreSQL (MULTI_POSTGRES) + # - Newer Python + Newer PostgreSQL (MULTI_POSTGRES) + # - Workers w/ Redis: + # - Newer Python + Newer PostgreSQL + # + ################################################################################ + + - label: "SyTest Monolith :postgres::debian: 10" + agents: + queue: "medium" + env: + POSTGRES: "1" + command: + - "bash .buildkite/merge_base_branch.sh" + - "bash /bootstrap.sh synapse" + plugins: + - docker#v3.7.0: + image: "matrixdotorg/sytest-synapse:dinsic" + propagate-environment: true + always-pull: true + workdir: "/src" + entrypoint: "/bin/sh" + init: false + shell: ["-x", "-c"] + mount-buildkite-agent: false + volumes: ["./logs:/logs"] + - artifacts#v1.3.0: + upload: [ "logs/**/*.log", "logs/**/*.log.*", "logs/results.tap" ] + - matrix-org/annotate: + path: "logs/annotate.md" + style: "error" + retry: *retry_setup + + - label: "SyTest Workers :postgres::debian: 10" + agents: + queue: "xlarge" + env: + MULTI_POSTGRES: "1" # Test with split out databases + POSTGRES: "1" + WORKERS: "1" + BLACKLIST: "synapse-blacklist-with-workers" + command: + - "bash .buildkite/merge_base_branch.sh" + - "bash -c 'cat /src/sytest-blacklist /src/.buildkite/worker-blacklist > /src/synapse-blacklist-with-workers'" + - "bash /bootstrap.sh synapse" + plugins: + - docker#v3.7.0: + image: "matrixdotorg/sytest-synapse:dinsic" + propagate-environment: true + always-pull: true + workdir: "/src" + entrypoint: "/bin/sh" + init: false + shell: ["-x", "-c"] + mount-buildkite-agent: false + volumes: ["./logs:/logs"] + - artifacts#v1.3.0: + upload: [ "logs/**/*.log", "logs/**/*.log.*", "logs/results.tap" ] + - matrix-org/annotate: + path: "logs/annotate.md" + style: "error" + retry: *retry_setup + + - label: "SyTest Workers :redis::postgres::debian: 10" + agents: + # 
this one seems to need a lot of memory. + queue: "xlarge" + env: + POSTGRES: "1" + WORKERS: "1" + REDIS: "1" + BLACKLIST: "synapse-blacklist-with-workers" + command: + - "bash .buildkite/merge_base_branch.sh" + - "bash -c 'cat /src/sytest-blacklist /src/.buildkite/worker-blacklist > /src/synapse-blacklist-with-workers'" + - "bash /bootstrap.sh synapse" + plugins: + - docker#v3.7.0: + image: "matrixdotorg/sytest-synapse:dinsic" + propagate-environment: true + always-pull: true + workdir: "/src" + entrypoint: "/bin/sh" + init: false + shell: ["-x", "-c"] + mount-buildkite-agent: false + volumes: ["./logs:/logs"] + - artifacts#v1.3.0: + upload: [ "logs/**/*.log", "logs/**/*.log.*", "logs/results.tap" ] + - matrix-org/annotate: + path: "logs/annotate.md" + style: "error" + retry: *retry_setup + + ################################################################################ + # + # synapse_port_db + # + # Tests the oldest and newest supported pairings of Python and PostgreSQL + # + ################################################################################ + + - label: "Port DB :python: 3.6 :postgres: 9.6" + agents: + queue: "medium" + env: + PYTHON_VERSION: "3.6" + POSTGRES_VERSION: "9.6" + command: + - "bash .buildkite/scripts/test_synapse_port_db.sh" + plugins: + - matrix-org/download#v1.1.0: + urls: + - https://raw.githubusercontent.com/matrix-org/synapse-dinsic/anoa/dinsic_release_1_31_0/.buildkite/docker-compose.yaml + - https://raw.githubusercontent.com/matrix-org/synapse-dinsic/anoa/dinsic_release_1_31_0/.buildkite/docker-compose-env + - docker-compose#v2.1.0: + run: testenv + config: + - /tmp/download-${BUILDKITE_BUILD_ID}/docker-compose.yaml + - artifacts#v1.3.0: + upload: [ "_trial_temp/*/*.log" ] + + - label: "Port DB :python: 3.9 :postgres: 13" + agents: + queue: "medium" + env: + PYTHON_VERSION: "3.9" + POSTGRES_VERSION: "13" + command: + - "bash .buildkite/scripts/test_synapse_port_db.sh" + plugins: + - matrix-org/download#v1.1.0: + urls: + - 
https://raw.githubusercontent.com/matrix-org/synapse-dinsic/anoa/dinsic_release_1_31_0/.buildkite/docker-compose.yaml + - https://raw.githubusercontent.com/matrix-org/synapse-dinsic/anoa/dinsic_release_1_31_0/.buildkite/docker-compose-env + - docker-compose#v2.1.0: + run: testenv + config: + - /tmp/download-${BUILDKITE_BUILD_ID}/docker-compose.yaml + - artifacts#v1.3.0: + upload: [ "_trial_temp/*/*.log" ] + +# - wait: ~ +# continue_on_failure: true +# +# - label: Trigger webhook +# command: "curl -k https://coveralls.io/webhook?repo_token=$COVERALLS_REPO_TOKEN -d \"payload[build_num]=$BUILDKITE_BUILD_NUMBER&payload[status]=done\"" + + ################################################################################ + # + # Complement Test Suite + # + ################################################################################ + + - command: + # Build a docker image from the checked out Synapse source + - "docker build -t matrixdotorg/synapse:latest -f docker/Dockerfile ." + # We use the complement:latest image to provide Complement's dependencies, but want + # to actually run against the latest version of Complement, so download it here. + # NOTE: We use the `anoa/knock_room_v7` branch here while knocking is still experimental on mainline. + # This branch essentially uses the stable identifiers for all knock-related room state so that things + # don't clash when rooms created on dinsic's Synapse potentially federate with mainline Synapse's in + # the future. + - "wget https://github.com/matrix-org/complement/archive/anoa/knock_room_v7.tar.gz" + - "tar -xzf knock_room_v7.tar.gz" + # Build a second docker image on top of the above image. This one sets up Synapse with a generated config file, + # signing and SSL keys so Synapse can run and federate + - "docker build -t complement-synapse -f complement-anoa-knock_room_v7/dockerfiles/Synapse.Dockerfile complement-anoa-knock_room_v7/dockerfiles" + # Finally, compile and run the tests. 
+ - "cd complement-anoa-knock_room_v7" + - "COMPLEMENT_BASE_IMAGE=complement-synapse:latest go test -v -tags synapse_blacklist,msc2403 ./tests" + label: "\U0001F9EA Complement" + agents: + queue: "medium" + plugins: + - docker#v3.7.0: + # The dockerfile for this image is at https://github.com/matrix-org/complement/blob/master/dockerfiles/ComplementCIBuildkite.Dockerfile. + image: "matrixdotorg/complement:latest" + mount-buildkite-agent: false + # Complement needs to know if it is running under CI + environment: + - "CI=true" + publish: [ "8448:8448" ] + # Complement uses Docker so pass through the docker socket. This means Complement shares + # the hosts Docker. + volumes: + - "/var/run/docker.sock:/var/run/docker.sock" \ No newline at end of file diff --git a/.github/workflows.yml b/.github/workflows.yml new file mode 100644 index 0000000000..45ab754730 --- /dev/null +++ b/.github/workflows.yml @@ -0,0 +1,16 @@ +name: "Assign Reviewers" +on: + pull_request: + types: [opened, ready_for_review] + +jobs: + assign-reviewers: + runs-on: ubuntu-latest + steps: + - name: "Assign Team and Persons" + uses: rowi1de/auto-assign-review-teams@v1.0.2 + with: + repo-token: ${{ secrets.GITHUB_TOKEN }} + teams: synapse-core # only works for GitHub Organisation/Teams + include-draft: false # Draft PRs will be skipped (default: false) + skip-with-manual-reviewers: 1 # Skip this action, if the number of reviwers was already assigned (default: 0) diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index 8d7e8cafd9..e32f2f378b 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -213,23 +213,16 @@ jobs: fail-fast: false matrix: include: - - sytest-tag: bionic + - sytest-tag: dinsic - - sytest-tag: bionic + - sytest-tag: dinsic postgres: postgres - - sytest-tag: testing - postgres: postgres - - - sytest-tag: bionic - postgres: multi-postgres - workers: workers - - - sytest-tag: buster + - sytest-tag: dinsic postgres: multi-postgres workers: 
workers - - sytest-tag: buster + - sytest-tag: dinsic postgres: postgres workers: workers redis: redis diff --git a/MANIFEST.in b/MANIFEST.in index c24786c3b3..2f68c9076f 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -1,4 +1,5 @@ include synctl +include sytest-blacklist include LICENSE include VERSION include *.rst @@ -54,3 +55,11 @@ prune demo/etc prune docker prune snap prune stubs + +exclude jenkins* +recursive-exclude jenkins *.sh + +# FIXME: we shouldn't have these templates here +recursive-include res/templates-dinsic *.css +recursive-include res/templates-dinsic *.html +recursive-include res/templates-dinsic *.txt diff --git a/changelog.d/1.feature b/changelog.d/1.feature new file mode 100644 index 0000000000..845642e445 --- /dev/null +++ b/changelog.d/1.feature @@ -0,0 +1 @@ +Forbid changing the name, avatar or topic of a direct room. diff --git a/changelog.d/10.bugfix b/changelog.d/10.bugfix new file mode 100644 index 0000000000..51f89f46dd --- /dev/null +++ b/changelog.d/10.bugfix @@ -0,0 +1 @@ +Don't apply retention policy based filtering on state events. diff --git a/changelog.d/104.misc b/changelog.d/104.misc new file mode 100644 index 0000000000..e663abdbab --- /dev/null +++ b/changelog.d/104.misc @@ -0,0 +1 @@ +Remove shadow HS support. diff --git a/changelog.d/105.misc b/changelog.d/105.misc new file mode 100644 index 0000000000..197cc6f05c --- /dev/null +++ b/changelog.d/105.misc @@ -0,0 +1 @@ +Ensure the Rust reporter passes type checking with jaeger-client 4.7's type annotations. diff --git a/changelog.d/106.misc b/changelog.d/106.misc new file mode 100644 index 0000000000..3e34f19a90 --- /dev/null +++ b/changelog.d/106.misc @@ -0,0 +1 @@ +Use correct image for sytest. diff --git a/changelog.d/10894.feature b/changelog.d/10894.feature new file mode 100644 index 0000000000..a4f968bed1 --- /dev/null +++ b/changelog.d/10894.feature @@ -0,0 +1 @@ +Add a `user_may_send_3pid_invite` spam checker callback for modules to allow or deny 3PID invites. 
diff --git a/changelog.d/10898.feature b/changelog.d/10898.feature new file mode 100644 index 0000000000..97fa39fd0c --- /dev/null +++ b/changelog.d/10898.feature @@ -0,0 +1 @@ +Add a `user_may_create_room_with_invites` spam checker callback to allow modules to allow or deny a room creation request based on the invites and/or 3PID invites it includes. diff --git a/changelog.d/10910.feature b/changelog.d/10910.feature new file mode 100644 index 0000000000..aee139f8b6 --- /dev/null +++ b/changelog.d/10910.feature @@ -0,0 +1 @@ +Add a spam checker callback to allow or deny room joins. diff --git a/changelog.d/10931.bugfix b/changelog.d/10931.bugfix new file mode 100644 index 0000000000..3f30c9ccf1 --- /dev/null +++ b/changelog.d/10931.bugfix @@ -0,0 +1 @@ +Fix debian builds due to dh-virtualenv no longer being able to build their docs. diff --git a/changelog.d/11.feature b/changelog.d/11.feature new file mode 100644 index 0000000000..362e4b1efd --- /dev/null +++ b/changelog.d/11.feature @@ -0,0 +1 @@ +Allow server admins to configure a custom global rate-limiting for third party invites. \ No newline at end of file diff --git a/changelog.d/11204.feature b/changelog.d/11204.feature new file mode 100644 index 0000000000..f58ed4b3dc --- /dev/null +++ b/changelog.d/11204.feature @@ -0,0 +1 @@ +Add a module API method to retrieve the current state of a room. diff --git a/changelog.d/12.feature b/changelog.d/12.feature new file mode 100644 index 0000000000..8e6e7a28af --- /dev/null +++ b/changelog.d/12.feature @@ -0,0 +1 @@ +Add `/user/:user_id/info` CS servlet and to give user deactivated/expired information. \ No newline at end of file diff --git a/changelog.d/13.feature b/changelog.d/13.feature new file mode 100644 index 0000000000..c2d2e93abf --- /dev/null +++ b/changelog.d/13.feature @@ -0,0 +1 @@ +Hide expired users from the user directory, and optionally re-add them on renewal. 
\ No newline at end of file diff --git a/changelog.d/14.feature b/changelog.d/14.feature new file mode 100644 index 0000000000..020d0bac1e --- /dev/null +++ b/changelog.d/14.feature @@ -0,0 +1 @@ +User displaynames now have capitalised letters after - symbols. \ No newline at end of file diff --git a/changelog.d/15.misc b/changelog.d/15.misc new file mode 100644 index 0000000000..4cc4a5175f --- /dev/null +++ b/changelog.d/15.misc @@ -0,0 +1 @@ +Fix the ordering on `scripts/generate_signing_key.py`'s import statement. diff --git a/changelog.d/17.misc b/changelog.d/17.misc new file mode 100644 index 0000000000..58120ab5c7 --- /dev/null +++ b/changelog.d/17.misc @@ -0,0 +1 @@ +Blacklist some flaky sytests until they're fixed. \ No newline at end of file diff --git a/changelog.d/18.feature b/changelog.d/18.feature new file mode 100644 index 0000000000..f5aa29a6e8 --- /dev/null +++ b/changelog.d/18.feature @@ -0,0 +1 @@ +Add option `limit_profile_requests_to_known_users` to prevent requirement of a user sharing a room with another user to query their profile information. \ No newline at end of file diff --git a/changelog.d/19.feature b/changelog.d/19.feature new file mode 100644 index 0000000000..95a44a4a89 --- /dev/null +++ b/changelog.d/19.feature @@ -0,0 +1 @@ +Add `max_avatar_size` and `allowed_avatar_mimetypes` to restrict the size of user avatars and their file type respectively. \ No newline at end of file diff --git a/changelog.d/2.bugfix b/changelog.d/2.bugfix new file mode 100644 index 0000000000..4fe5691468 --- /dev/null +++ b/changelog.d/2.bugfix @@ -0,0 +1 @@ +Don't treat 3PID revocation as a new 3PID invite. diff --git a/changelog.d/20.bugfix b/changelog.d/20.bugfix new file mode 100644 index 0000000000..8ba53c28f9 --- /dev/null +++ b/changelog.d/20.bugfix @@ -0,0 +1 @@ +Validate `client_secret` parameter against the regex provided by the C-S spec. 
\ No newline at end of file diff --git a/changelog.d/21.bugfix b/changelog.d/21.bugfix new file mode 100644 index 0000000000..630d7812f7 --- /dev/null +++ b/changelog.d/21.bugfix @@ -0,0 +1 @@ +Fix resetting user passwords via a phone number. diff --git a/changelog.d/28.bugfix b/changelog.d/28.bugfix new file mode 100644 index 0000000000..38d7455971 --- /dev/null +++ b/changelog.d/28.bugfix @@ -0,0 +1 @@ +Fix a bug causing account validity renewal emails to be sent even if the feature is turned off in some cases. diff --git a/changelog.d/29.misc b/changelog.d/29.misc new file mode 100644 index 0000000000..720e0ddcfb --- /dev/null +++ b/changelog.d/29.misc @@ -0,0 +1 @@ +Improve performance when making `.well-known` requests by sharing the SSL options between requests. diff --git a/changelog.d/3.bugfix b/changelog.d/3.bugfix new file mode 100644 index 0000000000..cc4bcefa80 --- /dev/null +++ b/changelog.d/3.bugfix @@ -0,0 +1 @@ +Fix encoding on password reset HTML responses in Python 2. diff --git a/changelog.d/30.misc b/changelog.d/30.misc new file mode 100644 index 0000000000..ae68554be3 --- /dev/null +++ b/changelog.d/30.misc @@ -0,0 +1 @@ +Improve performance when making HTTP requests to sygnal, sydent, etc, by sharing the SSL context object between connections. diff --git a/changelog.d/32.bugfix b/changelog.d/32.bugfix new file mode 100644 index 0000000000..b6e7b90710 --- /dev/null +++ b/changelog.d/32.bugfix @@ -0,0 +1 @@ +Fixes a bug when using the default display name during registration. diff --git a/changelog.d/39.feature b/changelog.d/39.feature new file mode 100644 index 0000000000..426b7ef27e --- /dev/null +++ b/changelog.d/39.feature @@ -0,0 +1 @@ +Merge Synapse v1.12.4 `master` into the `dinsic` branch. \ No newline at end of file diff --git a/changelog.d/4.bugfix b/changelog.d/4.bugfix new file mode 100644 index 0000000000..fe717920a6 --- /dev/null +++ b/changelog.d/4.bugfix @@ -0,0 +1 @@ +Fix handling of filtered strings in Python 3. 
diff --git a/changelog.d/45.feature b/changelog.d/45.feature new file mode 100644 index 0000000000..d45ac34ac1 --- /dev/null +++ b/changelog.d/45.feature @@ -0,0 +1 @@ +Merge Synapse mainline releases v1.13.0 through v1.14.0 into the `dinsic` branch. \ No newline at end of file diff --git a/changelog.d/46.feature b/changelog.d/46.feature new file mode 100644 index 0000000000..7872d956e3 --- /dev/null +++ b/changelog.d/46.feature @@ -0,0 +1 @@ +Add a bulk version of the User Info API. Deprecate the single-use version. \ No newline at end of file diff --git a/changelog.d/47.misc b/changelog.d/47.misc new file mode 100644 index 0000000000..1d6596d788 --- /dev/null +++ b/changelog.d/47.misc @@ -0,0 +1 @@ +Improve performance of `mark_expired_users_as_inactive` background job. \ No newline at end of file diff --git a/changelog.d/48.feature b/changelog.d/48.feature new file mode 100644 index 0000000000..b7939f3f51 --- /dev/null +++ b/changelog.d/48.feature @@ -0,0 +1 @@ +Prevent `/register` from raising `M_USER_IN_USE` until UI Auth has been completed. Have `/register/available` always return true. diff --git a/changelog.d/5.bugfix b/changelog.d/5.bugfix new file mode 100644 index 0000000000..53f57f46ca --- /dev/null +++ b/changelog.d/5.bugfix @@ -0,0 +1 @@ +Fix room retention policy management in worker mode. diff --git a/changelog.d/50.feature b/changelog.d/50.feature new file mode 100644 index 0000000000..0801622c8a --- /dev/null +++ b/changelog.d/50.feature @@ -0,0 +1 @@ +Merge Synapse mainline v1.15.1 into the `dinsic` branch. \ No newline at end of file diff --git a/changelog.d/5083.feature b/changelog.d/5083.feature new file mode 100644 index 0000000000..2ffdd37eef --- /dev/null +++ b/changelog.d/5083.feature @@ -0,0 +1 @@ +Adds auth_profile_reqs option to require access_token to GET /profile endpoints on CS API. 
diff --git a/changelog.d/5098.misc b/changelog.d/5098.misc new file mode 100644 index 0000000000..9cd83bf226 --- /dev/null +++ b/changelog.d/5098.misc @@ -0,0 +1 @@ +Add workarounds for pep-517 install errors. diff --git a/changelog.d/51.feature b/changelog.d/51.feature new file mode 100644 index 0000000000..e5c9990ad6 --- /dev/null +++ b/changelog.d/51.feature @@ -0,0 +1 @@ +Add `bind_new_user_emails_to_sydent` option for automatically binding user's emails after registration. diff --git a/changelog.d/5214.feature b/changelog.d/5214.feature new file mode 100644 index 0000000000..6c0f15c901 --- /dev/null +++ b/changelog.d/5214.feature @@ -0,0 +1 @@ +Allow server admins to define and enforce a password policy (MSC2000). diff --git a/changelog.d/53.feature b/changelog.d/53.feature new file mode 100644 index 0000000000..96c628e824 --- /dev/null +++ b/changelog.d/53.feature @@ -0,0 +1 @@ +Merge mainline Synapse v1.18.0 into the `dinsic` branch. \ No newline at end of file diff --git a/changelog.d/5416.misc b/changelog.d/5416.misc new file mode 100644 index 0000000000..155e8c7cd3 --- /dev/null +++ b/changelog.d/5416.misc @@ -0,0 +1 @@ +Add unique index to the profile_replication_status table. diff --git a/changelog.d/5420.feature b/changelog.d/5420.feature new file mode 100644 index 0000000000..745864b903 --- /dev/null +++ b/changelog.d/5420.feature @@ -0,0 +1 @@ +Add configuration option to hide new users from the user directory. diff --git a/changelog.d/56.misc b/changelog.d/56.misc new file mode 100644 index 0000000000..f66c55af21 --- /dev/null +++ b/changelog.d/56.misc @@ -0,0 +1 @@ +Temporarily revert commit a3fbc23. diff --git a/changelog.d/5610.feature b/changelog.d/5610.feature new file mode 100644 index 0000000000..b99514f97e --- /dev/null +++ b/changelog.d/5610.feature @@ -0,0 +1 @@ +Implement new custom event rules for power levels. 
diff --git a/changelog.d/57.misc b/changelog.d/57.misc new file mode 100644 index 0000000000..1bbe8611cd --- /dev/null +++ b/changelog.d/57.misc @@ -0,0 +1 @@ +Add user_id back to presence in worker too https://github.com/matrix-org/synapse/commit/0bbbd10513008d30c17eb1d1e7ba1d091fb44ec7 . diff --git a/changelog.d/5702.bugfix b/changelog.d/5702.bugfix new file mode 100644 index 0000000000..43b6e39b13 --- /dev/null +++ b/changelog.d/5702.bugfix @@ -0,0 +1 @@ +Fix 3PID invite to invite association detection in the Tchap room access rules. diff --git a/changelog.d/5760.feature b/changelog.d/5760.feature new file mode 100644 index 0000000000..90302d793e --- /dev/null +++ b/changelog.d/5760.feature @@ -0,0 +1 @@ +Force the access rule to be "restricted" if the join rule is "public". diff --git a/changelog.d/58.misc b/changelog.d/58.misc new file mode 100644 index 0000000000..64098a68a4 --- /dev/null +++ b/changelog.d/58.misc @@ -0,0 +1 @@ +Don't push if an user account has expired. diff --git a/changelog.d/59.feature b/changelog.d/59.feature new file mode 100644 index 0000000000..aa07f762d1 --- /dev/null +++ b/changelog.d/59.feature @@ -0,0 +1 @@ +Freeze a room when the last administrator in the room leaves. \ No newline at end of file diff --git a/changelog.d/6.bugfix b/changelog.d/6.bugfix new file mode 100644 index 0000000000..43ab65cc95 --- /dev/null +++ b/changelog.d/6.bugfix @@ -0,0 +1 @@ +Don't forbid membership events which membership isn't 'join' or 'invite' in restricted rooms, so that users who got into these rooms before the access rules started to be enforced can leave them. diff --git a/changelog.d/60.misc b/changelog.d/60.misc new file mode 100644 index 0000000000..d2625a4f65 --- /dev/null +++ b/changelog.d/60.misc @@ -0,0 +1 @@ +Make all rooms noisy by default. 
diff --git a/changelog.d/61.misc b/changelog.d/61.misc new file mode 100644 index 0000000000..0c3ba98628 --- /dev/null +++ b/changelog.d/61.misc @@ -0,0 +1 @@ +Change the minimum power levels for invites and other state events in new rooms. \ No newline at end of file diff --git a/changelog.d/62.misc b/changelog.d/62.misc new file mode 100644 index 0000000000..1e26456595 --- /dev/null +++ b/changelog.d/62.misc @@ -0,0 +1 @@ +Type hinting and other cleanups for `synapse.third_party_rules.access_rules`. \ No newline at end of file diff --git a/changelog.d/63.feature b/changelog.d/63.feature new file mode 100644 index 0000000000..b45f38fa94 --- /dev/null +++ b/changelog.d/63.feature @@ -0,0 +1 @@ +Make AccessRules use the public rooms directory instead of checking a room's join rules on rule change. diff --git a/changelog.d/64.bugfix b/changelog.d/64.bugfix new file mode 100644 index 0000000000..60c077af94 --- /dev/null +++ b/changelog.d/64.bugfix @@ -0,0 +1 @@ +Ensure a `RoomAccessRules` test doesn't accidentally modify a room's access rule and then test that room assuming its access rule has not changed. diff --git a/changelog.d/65.bugfix b/changelog.d/65.bugfix new file mode 100644 index 0000000000..71b498cbc8 --- /dev/null +++ b/changelog.d/65.bugfix @@ -0,0 +1 @@ +Fix `nextLink` parameters being checked on validation endpoints even if they weren't provided by the client. \ No newline at end of file diff --git a/changelog.d/66.bugfix b/changelog.d/66.bugfix new file mode 100644 index 0000000000..9547cfeddd --- /dev/null +++ b/changelog.d/66.bugfix @@ -0,0 +1 @@ +Create a mapping between user ID and threepid when binding via the internal Sydent bind API. \ No newline at end of file diff --git a/changelog.d/67.misc b/changelog.d/67.misc new file mode 100644 index 0000000000..0a2095e4d4 --- /dev/null +++ b/changelog.d/67.misc @@ -0,0 +1 @@ +Merge mainline Synapse v1.21.2 into 'dinsic'. 
\ No newline at end of file diff --git a/changelog.d/6739.feature b/changelog.d/6739.feature new file mode 100644 index 0000000000..9c41140194 --- /dev/null +++ b/changelog.d/6739.feature @@ -0,0 +1 @@ +Implement "room knocking" as per [MSC2403](https://github.com/matrix-org/matrix-doc/pull/2403). Contributed by Sorunome and anoa. \ No newline at end of file diff --git a/changelog.d/68.misc b/changelog.d/68.misc new file mode 100644 index 0000000000..99cc5f7483 --- /dev/null +++ b/changelog.d/68.misc @@ -0,0 +1 @@ +Override any missing default power level keys with DINUM's defaults when creating a room. \ No newline at end of file diff --git a/changelog.d/71.bugfix b/changelog.d/71.bugfix new file mode 100644 index 0000000000..cad69c7bd2 --- /dev/null +++ b/changelog.d/71.bugfix @@ -0,0 +1 @@ +Fix users info for remote users. diff --git a/changelog.d/72.bugfix b/changelog.d/72.bugfix new file mode 100644 index 0000000000..7ebd16f437 --- /dev/null +++ b/changelog.d/72.bugfix @@ -0,0 +1 @@ +Update the version of mypy to 0.790. diff --git a/changelog.d/9.misc b/changelog.d/9.misc new file mode 100644 index 0000000000..24fd12c978 --- /dev/null +++ b/changelog.d/9.misc @@ -0,0 +1 @@ +Add SyTest to the BuildKite CI. diff --git a/changelog.d/9084.bugfix b/changelog.d/9084.bugfix new file mode 100644 index 0000000000..415dd8b259 --- /dev/null +++ b/changelog.d/9084.bugfix @@ -0,0 +1 @@ +Don't blacklist connections to the configured proxy. Contributed by @Bubu. diff --git a/docs/sample_config.yaml b/docs/sample_config.yaml index ae476d19ac..3e9def56e0 100644 --- a/docs/sample_config.yaml +++ b/docs/sample_config.yaml @@ -479,6 +479,74 @@ limit_remote_rooms: # #allow_per_room_profiles: false +# Whether to show the users on this homeserver in the user directory. Defaults to +# 'true'. +# +#show_users_in_user_directory: false + +# Message retention policy at the server level. 
+# +# Room admins and mods can define a retention period for their rooms using the +# 'm.room.retention' state event, and server admins can cap this period by setting +# the 'allowed_lifetime_min' and 'allowed_lifetime_max' config options. +# +# If this feature is enabled, Synapse will regularly look for and purge events +# which are older than the room's maximum retention period. Synapse will also +# filter events received over federation so that events that should have been +# purged are ignored and not stored again. +# +retention: + # The message retention policies feature is disabled by default. Uncomment the + # following line to enable it. + # + #enabled: true + + # Default retention policy. If set, Synapse will apply it to rooms that lack the + # 'm.room.retention' state event. Currently, the value of 'min_lifetime' doesn't + # matter much because Synapse doesn't take it into account yet. + # + #default_policy: + # min_lifetime: 1d + # max_lifetime: 1y + + # Retention policy limits. If set, a user won't be able to send a + # 'm.room.retention' event which features a 'min_lifetime' or a 'max_lifetime' + # that's not within this range. This is especially useful in closed federations, + # in which server admins can make sure every federating server applies the same + # rules. + # + #allowed_lifetime_min: 1d + #allowed_lifetime_max: 1y + + # Server admins can define the settings of the background jobs purging the + # events which lifetime has expired under the 'purge_jobs' section. + # + # If no configuration is provided, a single job will be set up to delete expired + # events in every room daily. + # + # Each job's configuration defines which range of message lifetimes the job + # takes care of. For example, if 'shortest_max_lifetime' is '2d' and + # 'longest_max_lifetime' is '3d', the job will handle purging expired events in + # rooms whose state defines a 'max_lifetime' that's both higher than 2 days, and + # lower than or equal to 3 days. 
Both the minimum and the maximum value of a + # range are optional, e.g. a job with no 'shortest_max_lifetime' and a + # 'longest_max_lifetime' of '3d' will handle every room with a retention policy + # which 'max_lifetime' is lower than or equal to three days. + # + # The rationale for this per-job configuration is that some rooms might have a + # retention policy with a low 'max_lifetime', where history needs to be purged + # of outdated messages on a very frequent basis (e.g. every 5min), but not want + # that purge to be performed by a job that's iterating over every room it knows, + # which would be quite heavy on the server. + # + #purge_jobs: + # - shortest_max_lifetime: 1d + # longest_max_lifetime: 3d + # interval: 5m: + # - shortest_max_lifetime: 3d + # longest_max_lifetime: 1y + # interval: 24h + # How long to keep redacted events in unredacted form in the database. After # this period redacted events get replaced with their redacted form in the DB. # @@ -841,6 +909,8 @@ log_config: "CONFDIR/SERVERNAME.log.config" # - one for login that ratelimits login requests based on the account the # client is attempting to log into, based on the amount of failed login # attempts for this account. +# - one that ratelimits third-party invites requests based on the account +# that's making the requests. # - one for ratelimiting redactions by room admins. If this is not explicitly # set then it uses the same ratelimiting as per rc_message. This is useful # to allow room admins to deal with abuse quickly. @@ -877,6 +947,10 @@ log_config: "CONFDIR/SERVERNAME.log.config" # per_second: 0.17 # burst_count: 3 # +#rc_third_party_invite: +# per_second: 0.2 +# burst_count: 10 +# #rc_admin_redaction: # per_second: 1 # burst_count: 50 @@ -966,6 +1040,30 @@ media_store_path: "DATADIR/media_store" # #max_upload_size: 50M +# The largest allowed size for a user avatar. If not defined, no +# restriction will be imposed. 
+# +# Note that this only applies when an avatar is changed globally. +# Per-room avatar changes are not affected. See allow_per_room_profiles +# for disabling that functionality. +# +# Note that user avatar changes will not work if this is set without +# using Synapse's local media repo. +# +#max_avatar_size: 10M + +# Allow mimetypes for a user avatar. If not defined, no restriction will +# be imposed. +# +# Note that this only applies when an avatar is changed globally. +# Per-room avatar changes are not affected. See allow_per_room_profiles +# for disabling that functionality. +# +# Note that user avatar changes will not work if this is set without +# using Synapse's local media repo. +# +#allowed_avatar_mimetypes: ["image/png", "image/jpeg", "image/gif"] + # Maximum number of pixels that will be thumbnailed # #max_image_pixels: 32M @@ -1220,9 +1318,32 @@ oembed: # #disable_msisdn_registration: true +# Derive the user's matrix ID from a type of 3PID used when registering. +# This overrides any matrix ID the user proposes when calling /register +# The 3PID type should be present in registrations_require_3pid to avoid +# users failing to register if they don't specify the right kind of 3pid. +# +#register_mxid_from_3pid: email + +# Uncomment to set the display name of new users to their email address, +# rather than using the default heuristic. +# +#register_just_use_email_for_display_name: true + # Mandate that users are only allowed to associate certain formats of # 3PIDs with accounts on this server. # +# Use an Identity Server to establish which 3PIDs are allowed to register? +# Overrides allowed_local_3pids below. 
+# +#check_is_for_allowed_local_3pids: matrix.org +# +# If you are using an IS you can also check whether that IS registers +# pending invites for the given 3PID (and then allow it to sign up on +# the platform): +# +#allow_invited_3pids: false +# #allowed_local_3pids: # - medium: email # pattern: '^[^@]+@matrix\.org$' @@ -1231,6 +1352,11 @@ oembed: # - medium: msisdn # pattern: '\+44' +# If true, stop users from trying to change the 3PIDs associated with +# their accounts. +# +#disable_3pid_changes: false + # Enable 3PIDs lookup requests to identity servers from this server. # #enable_3pid_lookup: true @@ -1271,6 +1397,21 @@ oembed: # #default_identity_server: https://matrix.org +# If enabled, user IDs, display names and avatar URLs will be replicated +# to this server whenever they change. +# This is an experimental API currently implemented by sydent to support +# cross-homeserver user directories. +# +#replicate_user_profiles_to: example.com + +# If enabled, don't let users set their own display names/avatars +# other than for the very first time (unless they are a server admin). +# Useful when provisioning users based on the contents of a 3rd party +# directory and to avoid ambiguities. +# +#disable_set_displayname: false +#disable_set_avatar_url: false + # Handle threepid (email/phone etc) registration and password resets through a set of # *trusted* identity servers. Note that this allows the configured identity server to # reset passwords for accounts! @@ -1398,6 +1539,31 @@ account_threepid_delegates: # #auto_join_rooms_for_guests: false +# Rewrite identity server URLs with a map from one URL to another. Applies to URLs +# provided by clients (which have https:// prepended) and those specified +# in `account_threepid_delegates`. URLs should not feature a trailing slash. 
+# +#rewrite_identity_server_urls: +# "https://somewhere.example.com": "https://somewhereelse.example.com" + +# When a user registers an account with an email address, it can be useful to +# bind that email address to their mxid on an identity server. Typically, this +# requires the user to validate their email address with the identity server. +# However if Synapse itself is handling email validation on registration, the +# user ends up needing to validate their email twice, which leads to poor UX. +# +# It is possible to force Sydent, one identity server implementation, to bind +# threepids using its internal, unauthenticated bind API: +# https://github.com/matrix-org/sydent/#internal-bind-and-unbind-api +# +# Configure the address of a Sydent server here to have Synapse attempt +# to automatically bind users' emails following registration. The +# internal bind API must be reachable from Synapse, but should NOT be +# exposed to any third party, as it allows the creation of bindings +# without validation. +# +#bind_new_user_emails_to_sydent: https://example.com:8091 + ## Metrics ### @@ -2374,6 +2540,12 @@ user_directory: # #search_all_users: true + # If this is set, user search will be delegated to this ID server instead + # of Synapse performing the search itself. + # This is an experimental API. + # + #defer_to_id_server: https://id.example.com + # Defines whether to prefer local users in search query results. # If True, local users are more likely to appear above remote users # when searching the user directory. Defaults to false. diff --git a/res/templates-dinsic/mail-Vector.css b/res/templates-dinsic/mail-Vector.css new file mode 100644 index 0000000000..6a3e36eda1 --- /dev/null +++ b/res/templates-dinsic/mail-Vector.css @@ -0,0 +1,7 @@ +.header { + border-bottom: 4px solid #e4f7ed ! important; +} + +.notif_link a, .footer a { + color: #76CFA6 ! 
important; +} diff --git a/res/templates-dinsic/mail.css b/res/templates-dinsic/mail.css new file mode 100644 index 0000000000..5ab3e1b06d --- /dev/null +++ b/res/templates-dinsic/mail.css @@ -0,0 +1,156 @@ +body { + margin: 0px; +} + +pre, code { + word-break: break-word; + white-space: pre-wrap; +} + +#page { + font-family: 'Open Sans', Helvetica, Arial, Sans-Serif; + font-color: #454545; + font-size: 12pt; + width: 100%; + padding: 20px; +} + +#inner { + width: 640px; +} + +.header { + width: 100%; + height: 87px; + color: #454545; + border-bottom: 4px solid #e5e5e5; +} + +.logo { + text-align: right; + margin-left: 20px; +} + +.salutation { + padding-top: 10px; + font-weight: bold; +} + +.summarytext { +} + +.room { + width: 100%; + color: #454545; + border-bottom: 1px solid #e5e5e5; +} + +.room_header td { + padding-top: 38px; + padding-bottom: 10px; + border-bottom: 1px solid #e5e5e5; +} + +.room_name { + vertical-align: middle; + font-size: 18px; + font-weight: bold; +} + +.room_header h2 { + margin-top: 0px; + margin-left: 75px; + font-size: 20px; +} + +.room_avatar { + width: 56px; + line-height: 0px; + text-align: center; + vertical-align: middle; +} + +.room_avatar img { + width: 48px; + height: 48px; + object-fit: cover; + border-radius: 24px; +} + +.notif { + border-bottom: 1px solid #e5e5e5; + margin-top: 16px; + padding-bottom: 16px; +} + +.historical_message .sender_avatar { + opacity: 0.3; +} + +/* spell out opacity and historical_message class names for Outlook aka Word */ +.historical_message .sender_name { + color: #e3e3e3; +} + +.historical_message .message_time { + color: #e3e3e3; +} + +.historical_message .message_body { + color: #c7c7c7; +} + +.historical_message td, +.message td { + padding-top: 10px; +} + +.sender_avatar { + width: 56px; + text-align: center; + vertical-align: top; +} + +.sender_avatar img { + margin-top: -2px; + width: 32px; + height: 32px; + border-radius: 16px; +} + +.sender_name { + display: inline; + font-size: 13px; 
+ color: #a2a2a2; +} + +.message_time { + text-align: right; + width: 100px; + font-size: 11px; + color: #a2a2a2; +} + +.message_body { +} + +.notif_link td { + padding-top: 10px; + padding-bottom: 10px; + font-weight: bold; +} + +.notif_link a, .footer a { + color: #454545; + text-decoration: none; +} + +.debug { + font-size: 10px; + color: #888; +} + +.footer { + margin-top: 20px; + text-align: center; +} \ No newline at end of file diff --git a/res/templates-dinsic/notif.html b/res/templates-dinsic/notif.html new file mode 100644 index 0000000000..bcdfeea9da --- /dev/null +++ b/res/templates-dinsic/notif.html @@ -0,0 +1,45 @@ +{% for message in notif.messages %} + <tr class="{{ "historical_message" if message.is_historical else "message" }}"> + <td class="sender_avatar"> + {% if loop.index0 == 0 or notif.messages[loop.index0 - 1].sender_name != notif.messages[loop.index0].sender_name %} + {% if message.sender_avatar_url %} + <img alt="" class="sender_avatar" src="{{ message.sender_avatar_url|mxc_to_http(32,32) }}" /> + {% else %} + {% if message.sender_hash % 3 == 0 %} + <img class="sender_avatar" src="https://vector.im/beta/img/76cfa6.png" /> + {% elif message.sender_hash % 3 == 1 %} + <img class="sender_avatar" src="https://vector.im/beta/img/50e2c2.png" /> + {% else %} + <img class="sender_avatar" src="https://vector.im/beta/img/f4c371.png" /> + {% endif %} + {% endif %} + {% endif %} + </td> + <td class="message_contents"> + {% if loop.index0 == 0 or notif.messages[loop.index0 - 1].sender_name != notif.messages[loop.index0].sender_name %} + <div class="sender_name">{% if message.msgtype == "m.emote" %}*{% endif %} {{ message.sender_name }}</div> + {% endif %} + <div class="message_body"> + {% if message.msgtype == "m.text" %} + {{ message.body_text_html }} + {% elif message.msgtype == "m.emote" %} + {{ message.body_text_html }} + {% elif message.msgtype == "m.notice" %} + {{ message.body_text_html }} + {% elif message.msgtype == "m.image" %} + <img src="{{ 
message.image_url|mxc_to_http(640, 480, scale) }}" /> + {% elif message.msgtype == "m.file" %} + <span class="filename">{{ message.body_text_plain }}</span> + {% endif %} + </div> + </td> + <td class="message_time">{{ message.ts|format_ts("%H:%M") }}</td> + </tr> +{% endfor %} +<tr class="notif_link"> + <td></td> + <td> + <a href="{{ notif.link }}">Voir {{ room.title }}</a> + </td> + <td></td> +</tr> diff --git a/res/templates-dinsic/notif.txt b/res/templates-dinsic/notif.txt new file mode 100644 index 0000000000..3dff1bb570 --- /dev/null +++ b/res/templates-dinsic/notif.txt @@ -0,0 +1,16 @@ +{% for message in notif.messages %} +{% if message.msgtype == "m.emote" %}* {% endif %}{{ message.sender_name }} ({{ message.ts|format_ts("%H:%M") }}) +{% if message.msgtype == "m.text" %} +{{ message.body_text_plain }} +{% elif message.msgtype == "m.emote" %} +{{ message.body_text_plain }} +{% elif message.msgtype == "m.notice" %} +{{ message.body_text_plain }} +{% elif message.msgtype == "m.image" %} +{{ message.body_text_plain }} +{% elif message.msgtype == "m.file" %} +{{ message.body_text_plain }} +{% endif %} +{% endfor %} + +Voir {{ room.title }} à {{ notif.link }} diff --git a/res/templates-dinsic/notif_mail.html b/res/templates-dinsic/notif_mail.html new file mode 100644 index 0000000000..1e1efa74b2 --- /dev/null +++ b/res/templates-dinsic/notif_mail.html @@ -0,0 +1,55 @@ +<!doctype html> +<html lang="en"> + <head> + <style type="text/css"> + {% include 'mail.css' without context %} + {% include "mail-%s.css" % app_name ignore missing without context %} + </style> + </head> + <body> + <table id="page"> + <tr> + <td> </td> + <td id="inner"> + <table class="header"> + <tr> + <td> + <div class="salutation">Bonjour {{ user_display_name }},</div> + <div class="summarytext">{{ summary_text }}</div> + </td> + <td class="logo"> + {% if app_name == "Riot" %} + <img src="http://matrix.org/img/riot-logo-email.png" width="83" height="83" alt="[Riot]"/> + {% elif app_name == 
"Vector" %} + <img src="http://matrix.org/img/vector-logo-email.png" width="64" height="83" alt="[Vector]"/> + {% else %} + <img src="http://matrix.org/img/matrix-120x51.png" width="120" height="51" alt="[matrix]"/> + {% endif %} + </td> + </tr> + </table> + {% for room in rooms %} + {% include 'room.html' with context %} + {% endfor %} + <div class="footer"> + <a href="{{ unsubscribe_link }}">Se désinscrire</a> + <br/> + <br/> + <div class="debug"> + Sending email at {{ reason.now|format_ts("%c") }} due to activity in room {{ reason.room_name }} because + an event was received at {{ reason.received_at|format_ts("%c") }} + which is more than {{ "%.1f"|format(reason.delay_before_mail_ms / (60*1000)) }} ({{ reason.delay_before_mail_ms }}) mins ago, + {% if reason.last_sent_ts %} + and the last time we sent a mail for this room was {{ reason.last_sent_ts|format_ts("%c") }}, + which is more than {{ "%.1f"|format(reason.throttle_ms / (60*1000)) }} (current throttle_ms) mins ago. + {% else %} + and we don't have a last time we sent a mail for this room. 
+ {% endif %} + </div> + </div> + </td> + <td> </td> + </tr> + </table> + </body> +</html> diff --git a/res/templates-dinsic/notif_mail.txt b/res/templates-dinsic/notif_mail.txt new file mode 100644 index 0000000000..fae877426f --- /dev/null +++ b/res/templates-dinsic/notif_mail.txt @@ -0,0 +1,10 @@ +Bonjour {{ user_display_name }}, + +{{ summary_text }} + +{% for room in rooms %} +{% include 'room.txt' with context %} +{% endfor %} + +Vous pouvez désactiver ces notifications en cliquant ici {{ unsubscribe_link }} + diff --git a/res/templates-dinsic/room.html b/res/templates-dinsic/room.html new file mode 100644 index 0000000000..0487b1b11c --- /dev/null +++ b/res/templates-dinsic/room.html @@ -0,0 +1,33 @@ +<table class="room"> + <tr class="room_header"> + <td class="room_avatar"> + {% if room.avatar_url %} + <img alt="" src="{{ room.avatar_url|mxc_to_http(48,48) }}" /> + {% else %} + {% if room.hash % 3 == 0 %} + <img alt="" src="https://vector.im/beta/img/76cfa6.png" /> + {% elif room.hash % 3 == 1 %} + <img alt="" src="https://vector.im/beta/img/50e2c2.png" /> + {% else %} + <img alt="" src="https://vector.im/beta/img/f4c371.png" /> + {% endif %} + {% endif %} + </td> + <td class="room_name" colspan="2"> + {{ room.title }} + </td> + </tr> + {% if room.invite %} + <tr> + <td></td> + <td> + <a href="{{ room.link }}">Rejoindre la conversation.</a> + </td> + <td></td> + </tr> + {% else %} + {% for notif in room.notifs %} + {% include 'notif.html' with context %} + {% endfor %} + {% endif %} +</table> diff --git a/res/templates-dinsic/room.txt b/res/templates-dinsic/room.txt new file mode 100644 index 0000000000..dd36d01d21 --- /dev/null +++ b/res/templates-dinsic/room.txt @@ -0,0 +1,9 @@ +{{ room.title }} + +{% if room.invite %} + Vous avez été invité, rejoignez la conversation en cliquant sur le lien suivant {{ room.link }} +{% else %} + {% for notif in room.notifs %} + {% include 'notif.txt' with context %} + {% endfor %} +{% endif %} diff --git 
a/scripts-dev/check-newsfragment b/scripts-dev/check-newsfragment index af4de345df..25bd75ad1f 100755 --- a/scripts-dev/check-newsfragment +++ b/scripts-dev/check-newsfragment @@ -7,9 +7,9 @@ echo -e "+++ \033[32mChecking newsfragment\033[m" set -e -# make sure that origin/develop is up to date -git remote set-branches --add origin develop -git fetch -q origin develop +# make sure that origin/dinsic is up to date +git remote set-branches --add origin dinsic +git fetch -q origin dinsic pr="$PULL_REQUEST_NUMBER" diff --git a/scripts/synapse_port_db b/scripts/synapse_port_db index 640ff15277..f6eede6ccc 100755 --- a/scripts/synapse_port_db +++ b/scripts/synapse_port_db @@ -48,6 +48,7 @@ from synapse.storage.databases.main.media_repository import ( MediaRepositoryBackgroundUpdateStore, ) from synapse.storage.databases.main.presence import PresenceBackgroundUpdateStore +from synapse.storage.databases.main.profile import ProfileStore from synapse.storage.databases.main.pusher import PusherWorkerStore from synapse.storage.databases.main.registration import ( RegistrationBackgroundUpdateStore, @@ -171,6 +172,7 @@ class Store( DeviceBackgroundUpdateStore, EventsBackgroundUpdatesStore, MediaRepositoryBackgroundUpdateStore, + ProfileStore, RegistrationBackgroundUpdateStore, RoomBackgroundUpdateStore, RoomMemberBackgroundUpdateStore, diff --git a/synapse/api/auth.py b/synapse/api/auth.py index 44883c6663..b98b818d18 100644 --- a/synapse/api/auth.py +++ b/synapse/api/auth.py @@ -268,11 +268,12 @@ class Auth: 403, "Application service cannot masquerade as this user (%s)." % user_id, ) + # Let ASes manipulate nonexistent users (e.g. 
to shadow-register them) # Check to make sure the user is already registered on the homeserver - elif not (await self.store.get_user_by_id(user_id)): - raise AuthError( - 403, "Application service has not registered this user (%s)" % user_id - ) + # elif not (await self.store.get_user_by_id(user_id)): + # raise AuthError( + # 403, "Application service has not registered this user (%s)" % user_id + # ) async def _get_appservice_user_id( self, request: Request @@ -280,6 +281,7 @@ class Auth: app_service = self.store.get_app_service_by_token( self.get_access_token_from_request(request) ) + if app_service is None: return None, None diff --git a/synapse/api/errors.py b/synapse/api/errors.py index 85302163da..b6b9ef5b8f 100644 --- a/synapse/api/errors.py +++ b/synapse/api/errors.py @@ -1,5 +1,6 @@ # Copyright 2014-2016 OpenMarket Ltd -# Copyright 2018 New Vector Ltd +# Copyright 2017-2018 New Vector Ltd +# Copyright 2019 The Matrix.org Foundation C.I.C. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/synapse/api/room_versions.py b/synapse/api/room_versions.py index 0a895bba48..c086dedc29 100644 --- a/synapse/api/room_versions.py +++ b/synapse/api/room_versions.py @@ -271,6 +271,7 @@ KNOWN_ROOM_VERSIONS: Dict[str, RoomVersion] = { RoomVersions.V4, RoomVersions.V5, RoomVersions.V6, + RoomVersions.V7, RoomVersions.MSC2176, RoomVersions.V7, RoomVersions.V8, diff --git a/synapse/config/api.py b/synapse/config/api.py index b18044f982..972f1ffc76 100644 --- a/synapse/config/api.py +++ b/synapse/config/api.py @@ -1,4 +1,4 @@ -# Copyright 2015-2021 The Matrix.org Foundation C.I.C. +# Copyright 2021 The Matrix.org Foundation C.I.C. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/synapse/config/ratelimiting.py b/synapse/config/ratelimiting.py index 36636ab07e..2bad0ce535 100644 --- a/synapse/config/ratelimiting.py +++ b/synapse/config/ratelimiting.py @@ -79,6 +79,9 @@ class RatelimitConfig(Config): ) self.rc_registration = RateLimitConfig(config.get("rc_registration", {})) + self.rc_third_party_invite = RateLimitConfig( + config.get("rc_third_party_invite", {}) + ) self.rc_registration_token_validity = RateLimitConfig( config.get("rc_registration_token_validity", {}), @@ -158,6 +161,8 @@ class RatelimitConfig(Config): # - one for login that ratelimits login requests based on the account the # client is attempting to log into, based on the amount of failed login # attempts for this account. + # - one that ratelimits third-party invite requests based on the account + # that's making the requests. # - one for ratelimiting redactions by room admins. If this is not explicitly # set then it uses the same ratelimiting as per rc_message. This is useful # to allow room admins to deal with abuse quickly. 
@@ -194,6 +199,10 @@ class RatelimitConfig(Config): # per_second: 0.17 # burst_count: 3 # + #rc_third_party_invite: + # per_second: 0.2 + # burst_count: 10 + # #rc_admin_redaction: # per_second: 1 # burst_count: 50 diff --git a/synapse/config/registration.py b/synapse/config/registration.py index 61e569d412..20ad339874 100644 --- a/synapse/config/registration.py +++ b/synapse/config/registration.py @@ -32,16 +32,34 @@ class RegistrationConfig(Config): self.registrations_require_3pid = config.get("registrations_require_3pid", []) self.allowed_local_3pids = config.get("allowed_local_3pids", []) + self.check_is_for_allowed_local_3pids = config.get( + "check_is_for_allowed_local_3pids", None + ) + self.allow_invited_3pids = config.get("allow_invited_3pids", False) + + self.disable_3pid_changes = config.get("disable_3pid_changes", False) + self.enable_3pid_lookup = config.get("enable_3pid_lookup", True) self.registration_requires_token = config.get( "registration_requires_token", False ) self.registration_shared_secret = config.get("registration_shared_secret") + self.register_mxid_from_3pid = config.get("register_mxid_from_3pid") + self.register_just_use_email_for_display_name = config.get( + "register_just_use_email_for_display_name", False + ) self.bcrypt_rounds = config.get("bcrypt_rounds", 12) account_threepid_delegates = config.get("account_threepid_delegates") or {} self.account_threepid_delegate_email = account_threepid_delegates.get("email") + if ( + self.account_threepid_delegate_email + and not self.account_threepid_delegate_email.startswith("http") + ): + raise ConfigError( + "account_threepid_delegates.email must begin with http:// or https://" + ) self.account_threepid_delegate_msisdn = account_threepid_delegates.get("msisdn") self.default_identity_server = config.get("default_identity_server") self.allow_guest_access = config.get("allow_guest_access", False) @@ -103,6 +121,14 @@ class RegistrationConfig(Config): self.enable_set_avatar_url = 
config.get("enable_set_avatar_url", True) self.enable_3pid_changes = config.get("enable_3pid_changes", True) + self.replicate_user_profiles_to = config.get("replicate_user_profiles_to", []) + if not isinstance(self.replicate_user_profiles_to, list): + self.replicate_user_profiles_to = [self.replicate_user_profiles_to] + + self.rewrite_identity_server_urls = ( + config.get("rewrite_identity_server_urls") or {} + ) + self.disable_msisdn_registration = config.get( "disable_msisdn_registration", False ) @@ -146,6 +172,23 @@ class RegistrationConfig(Config): # The success template used during fallback auth. self.fallback_success_template = self.read_template("auth_success.html") + self.bind_new_user_emails_to_sydent = config.get( + "bind_new_user_emails_to_sydent" + ) + + if self.bind_new_user_emails_to_sydent: + if not isinstance( + self.bind_new_user_emails_to_sydent, str + ) or not self.bind_new_user_emails_to_sydent.startswith("http"): + raise ConfigError( + "Option bind_new_user_emails_to_sydent has invalid value" + ) + + # Remove trailing slashes + self.bind_new_user_emails_to_sydent = ( + self.bind_new_user_emails_to_sydent.strip("/") + ) + def generate_config_section(self, generate_secrets=False, **kwargs): if generate_secrets: registration_shared_secret = 'registration_shared_secret: "%s"' % ( @@ -187,9 +230,32 @@ class RegistrationConfig(Config): # #disable_msisdn_registration: true + # Derive the user's matrix ID from a type of 3PID used when registering. + # This overrides any matrix ID the user proposes when calling /register + # The 3PID type should be present in registrations_require_3pid to avoid + # users failing to register if they don't specify the right kind of 3pid. + # + #register_mxid_from_3pid: email + + # Uncomment to set the display name of new users to their email address, + # rather than using the default heuristic. 
+ # + #register_just_use_email_for_display_name: true + # Mandate that users are only allowed to associate certain formats of # 3PIDs with accounts on this server. # + # Use an Identity Server to establish which 3PIDs are allowed to register? + # Overrides allowed_local_3pids below. + # + #check_is_for_allowed_local_3pids: matrix.org + # + # If you are using an IS you can also check whether that IS registers + # pending invites for the given 3PID (and then allow it to sign up on + # the platform): + # + #allow_invited_3pids: false + # #allowed_local_3pids: # - medium: email # pattern: '^[^@]+@matrix\\.org$' @@ -198,6 +264,11 @@ class RegistrationConfig(Config): # - medium: msisdn # pattern: '\\+44' + # If true, stop users from trying to change the 3PIDs associated with + # their accounts. + # + #disable_3pid_changes: false + # Enable 3PIDs lookup requests to identity servers from this server. # #enable_3pid_lookup: true @@ -238,6 +309,21 @@ class RegistrationConfig(Config): # #default_identity_server: https://matrix.org + # If enabled, user IDs, display names and avatar URLs will be replicated + # to this server whenever they change. + # This is an experimental API currently implemented by sydent to support + # cross-homeserver user directories. + # + #replicate_user_profiles_to: example.com + + # If enabled, don't let users set their own display names/avatars + # other than for the very first time (unless they are a server admin). + # Useful when provisioning users based on the contents of a 3rd party + # directory and to avoid ambiguities. + # + #disable_set_displayname: false + #disable_set_avatar_url: false + # Handle threepid (email/phone etc) registration and password resets through a set of # *trusted* identity servers. Note that this allows the configured identity server to # reset passwords for accounts! @@ -364,6 +450,31 @@ class RegistrationConfig(Config): # Defaults to true. 
# #auto_join_rooms_for_guests: false + + # Rewrite identity server URLs with a map from one URL to another. Applies to URLs + # provided by clients (which have https:// prepended) and those specified + # in `account_threepid_delegates`. URLs should not feature a trailing slash. + # + #rewrite_identity_server_urls: + # "https://somewhere.example.com": "https://somewhereelse.example.com" + + # When a user registers an account with an email address, it can be useful to + # bind that email address to their mxid on an identity server. Typically, this + # requires the user to validate their email address with the identity server. + # However if Synapse itself is handling email validation on registration, the + # user ends up needing to validate their email twice, which leads to poor UX. + # + # It is possible to force Sydent, one identity server implementation, to bind + # threepids using its internal, unauthenticated bind API: + # https://github.com/matrix-org/sydent/#internal-bind-and-unbind-api + # + # Configure the address of a Sydent server here to have Synapse attempt + # to automatically bind users' emails following registration. The + # internal bind API must be reachable from Synapse, but should NOT be + # exposed to any third party, as it allows the creation of bindings + # without validation. 
+ # + #bind_new_user_emails_to_sydent: https://example.com:8091 """ % locals() ) diff --git a/synapse/config/repository.py b/synapse/config/repository.py index 69906a98d4..0669eaefe5 100644 --- a/synapse/config/repository.py +++ b/synapse/config/repository.py @@ -112,6 +112,12 @@ class ContentRepositoryConfig(Config): self.max_image_pixels = self.parse_size(config.get("max_image_pixels", "32M")) self.max_spider_size = self.parse_size(config.get("max_spider_size", "10M")) + self.max_avatar_size = config.get("max_avatar_size") + if self.max_avatar_size: + self.max_avatar_size = self.parse_size(self.max_avatar_size) + + self.allowed_avatar_mimetypes = config.get("allowed_avatar_mimetypes", []) + self.media_store_path = self.ensure_directory( config.get("media_store_path", "media_store") ) @@ -266,6 +272,30 @@ class ContentRepositoryConfig(Config): # #max_upload_size: 50M + # The largest allowed size for a user avatar. If not defined, no + # restriction will be imposed. + # + # Note that this only applies when an avatar is changed globally. + # Per-room avatar changes are not affected. See allow_per_room_profiles + # for disabling that functionality. + # + # Note that user avatar changes will not work if this is set without + # using Synapse's local media repo. + # + #max_avatar_size: 10M + + # Allow mimetypes for a user avatar. If not defined, no restriction will + # be imposed. + # + # Note that this only applies when an avatar is changed globally. + # Per-room avatar changes are not affected. See allow_per_room_profiles + # for disabling that functionality. + # + # Note that user avatar changes will not work if this is set without + # using Synapse's local media repo. 
+ # + #allowed_avatar_mimetypes: ["image/png", "image/jpeg", "image/gif"] + # Maximum number of pixels that will be thumbnailed # #max_image_pixels: 32M diff --git a/synapse/config/server.py b/synapse/config/server.py index 8445e9dd05..4381a830a0 100644 --- a/synapse/config/server.py +++ b/synapse/config/server.py @@ -488,6 +488,12 @@ class ServerConfig(Config): # events with profile information that differ from the target's global profile. self.allow_per_room_profiles = config.get("allow_per_room_profiles", True) + # Whether to show the users on this homeserver in the user directory. Defaults to + # True. + self.show_users_in_user_directory = config.get( + "show_users_in_user_directory", True + ) + self.listeners = [parse_listener_def(x) for x in config.get("listeners", [])] # no_tls is not really supported any more, but let's grandfather it in @@ -1166,6 +1172,74 @@ class ServerConfig(Config): # #allow_per_room_profiles: false + # Whether to show the users on this homeserver in the user directory. Defaults to + # 'true'. + # + #show_users_in_user_directory: false + + # Message retention policy at the server level. + # + # Room admins and mods can define a retention period for their rooms using the + # 'm.room.retention' state event, and server admins can cap this period by setting + # the 'allowed_lifetime_min' and 'allowed_lifetime_max' config options. + # + # If this feature is enabled, Synapse will regularly look for and purge events + # which are older than the room's maximum retention period. Synapse will also + # filter events received over federation so that events that should have been + # purged are ignored and not stored again. + # + retention: + # The message retention policies feature is disabled by default. Uncomment the + # following line to enable it. + # + #enabled: true + + # Default retention policy. If set, Synapse will apply it to rooms that lack the + # 'm.room.retention' state event. 
Currently, the value of 'min_lifetime' doesn't + # matter much because Synapse doesn't take it into account yet. + # + #default_policy: + # min_lifetime: 1d + # max_lifetime: 1y + + # Retention policy limits. If set, a user won't be able to send a + # 'm.room.retention' event which features a 'min_lifetime' or a 'max_lifetime' + # that's not within this range. This is especially useful in closed federations, + # in which server admins can make sure every federating server applies the same + # rules. + # + #allowed_lifetime_min: 1d + #allowed_lifetime_max: 1y + + # Server admins can define the settings of the background jobs purging the + # events which lifetime has expired under the 'purge_jobs' section. + # + # If no configuration is provided, a single job will be set up to delete expired + # events in every room daily. + # + # Each job's configuration defines which range of message lifetimes the job + # takes care of. For example, if 'shortest_max_lifetime' is '2d' and + # 'longest_max_lifetime' is '3d', the job will handle purging expired events in + # rooms whose state defines a 'max_lifetime' that's both higher than 2 days, and + # lower than or equal to 3 days. Both the minimum and the maximum value of a + # range are optional, e.g. a job with no 'shortest_max_lifetime' and a + # 'longest_max_lifetime' of '3d' will handle every room with a retention policy + # which 'max_lifetime' is lower than or equal to three days. + # + # The rationale for this per-job configuration is that some rooms might have a + # retention policy with a low 'max_lifetime', where history needs to be purged + # of outdated messages on a very frequent basis (e.g. every 5min), but not want + # that purge to be performed by a job that's iterating over every room it knows, + # which would be quite heavy on the server. 
+ # + #purge_jobs: + # - shortest_max_lifetime: 1d + # longest_max_lifetime: 3d + # interval: 5m + # - shortest_max_lifetime: 3d + # longest_max_lifetime: 1y + # interval: 24h + # How long to keep redacted events in unredacted form in the database. After # this period redacted events get replaced with their redacted form in the DB. # diff --git a/synapse/config/user_directory.py b/synapse/config/user_directory.py index 6d6678c7e4..8dc02d72e6 100644 --- a/synapse/config/user_directory.py +++ b/synapse/config/user_directory.py @@ -28,6 +28,9 @@ class UserDirectoryConfig(Config): self.user_directory_search_all_users = user_directory_config.get( "search_all_users", False ) + self.user_directory_defer_to_id_server = user_directory_config.get( + "defer_to_id_server", None + ) self.user_directory_search_prefer_local_users = user_directory_config.get( "prefer_local_users", False ) @@ -61,6 +64,12 @@ class UserDirectoryConfig(Config): # #search_all_users: true + # If this is set, user search will be delegated to this ID server instead + # of Synapse performing the search itself. + # This is an experimental API. + # + #defer_to_id_server: https://id.example.com + # Defines whether to prefer local users in search query results. # If True, local users are more likely to appear above remote users # when searching the user directory. Defaults to false. diff --git a/synapse/federation/transport/client.py b/synapse/federation/transport/client.py index 10b5aa5af8..a67746d0f3 100644 --- a/synapse/federation/transport/client.py +++ b/synapse/federation/transport/client.py @@ -1,5 +1,5 @@ -# Copyright 2014-2021 The Matrix.org Foundation C.I.C. # Copyright 2020 Sorunome +# Copyright 2020 The Matrix.org Foundation C.I.C. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
@@ -1213,6 +1213,20 @@ class TransportLayerClient: args={"suggested_only": "true" if suggested_only else "false"}, ) + def get_info_of_users(self, destination: str, user_ids: List[str]): + """ + Args: + destination: The remote server + user_ids: A list of user IDs to query info about + + Returns: + Deferred[dict]: A dictionary of User ID to information about that user. + """ + path = _create_path(FEDERATION_UNSTABLE_PREFIX, "/users/info") + data = {"user_ids": user_ids} + + return self.client.post_json(destination=destination, path=path, data=data) + def _create_path(federation_prefix: str, path: str, *args: str) -> str: """ diff --git a/synapse/federation/transport/server.py b/synapse/federation/transport/server.py new file mode 100644 index 0000000000..e69de29bb2 --- /dev/null +++ b/synapse/federation/transport/server.py diff --git a/synapse/federation/transport/server/federation.py b/synapse/federation/transport/server/federation.py index 2fdf6cc99e..640d8316b7 100644 --- a/synapse/federation/transport/server/federation.py +++ b/synapse/federation/transport/server/federation.py @@ -25,6 +25,7 @@ from synapse.federation.transport.server._base import ( BaseFederationServlet, ) from synapse.http.servlet import ( + assert_params_in_dict, parse_boolean_from_args, parse_integer_from_args, parse_string_from_args, @@ -518,6 +519,57 @@ class On3pidBindServlet(BaseFederationServerServlet): return 200, {} +class FederationUserInfoServlet(BaseFederationServlet): + """ + Return information about a set of users. + + This API returns expiration and deactivation information about a set of + users. Requested users not local to this homeserver will be ignored. 
+ + Example request: + POST /users/info + + { + "user_ids": [ + "@alice:example.com", + "@bob:example.com" + ] + } + + Example response + { + "@alice:example.com": { + "expired": false, + "deactivated": true + } + } + """ + + PATH = "/users/info" + PREFIX = FEDERATION_UNSTABLE_PREFIX + + def __init__(self, hs, authenticator, ratelimiter, server_name): + super(FederationUserInfoServlet, self).__init__( + hs, authenticator, ratelimiter, server_name + ) + self._store = hs.get_datastore() + + async def on_POST(self, origin, content, query): + assert_params_in_dict(content, required=["user_ids"]) + + user_ids = content.get("user_ids", []) + + if not isinstance(user_ids, list): + raise SynapseError( + 400, + "'user_ids' must be a list of user ID strings", + errcode=Codes.INVALID_PARAM, + ) + + data = await self._store.get_info_for_users(user_ids) + return 200, data + + class FederationVersionServlet(BaseFederationServlet): PATH = "/version" @@ -699,6 +751,7 @@ FEDERATION_SERVLET_CLASSES: Tuple[Type[BaseFederationServlet], ...] 
= ( On3pidBindServlet, FederationVersionServlet, RoomComplexityServlet, + FederationUserInfoServlet, FederationSpaceSummaryServlet, FederationRoomHierarchyServlet, FederationV1SendKnockServlet, diff --git a/synapse/handlers/account_validity.py b/synapse/handlers/account_validity.py index 87e415df75..26c17f537a 100644 --- a/synapse/handlers/account_validity.py +++ b/synapse/handlers/account_validity.py @@ -20,7 +20,10 @@ from typing import TYPE_CHECKING, Awaitable, Callable, List, Optional, Tuple from twisted.web.http import Request from synapse.api.errors import AuthError, StoreError, SynapseError -from synapse.metrics.background_process_metrics import wrap_as_background_process +from synapse.metrics.background_process_metrics import ( + run_as_background_process, + wrap_as_background_process, +) from synapse.types import UserID from synapse.util import stringutils @@ -56,6 +59,10 @@ class AccountValidityHandler: hs.config.account_validity.account_validity_renew_by_email_enabled ) + self._show_users_in_user_directory = ( + self.hs.config.server.show_users_in_user_directory + ) + self._account_validity_period = None if self._account_validity_enabled: self._account_validity_period = ( @@ -77,6 +84,18 @@ class AccountValidityHandler: if hs.config.worker.run_background_tasks: self.clock.looping_call(self._send_renewal_emails, 30 * 60 * 1000) + # Mark users as inactive when they expired. 
Check once every hour + if self._account_validity_enabled: + + def mark_expired_users_as_inactive() -> Awaitable: + # run as a background process to allow async functions to work + return run_as_background_process( + "_mark_expired_users_as_inactive", + self._mark_expired_users_as_inactive, + ) + + self.clock.looping_call(mark_expired_users_as_inactive, 60 * 60 * 1000) + self._is_user_expired_callbacks: List[IS_USER_EXPIRED_CALLBACK] = [] self._on_user_registration_callbacks: List[ON_USER_REGISTRATION_CALLBACK] = [] self._on_legacy_send_mail_callback: Optional[ @@ -329,7 +348,7 @@ class AccountValidityHandler: Returns: A tuple containing: * A bool representing whether the token is valid and unused. - * A bool which is `True` if the token is valid, but stale. + * A bool representing whether the token is stale. * An int representing the user's expiry timestamp as milliseconds since the epoch, or 0 if the token was invalid. """ @@ -405,4 +424,30 @@ class AccountValidityHandler: token_used_ts=now, ) + # Check if renewed users should be reintroduced to the user directory + if self._show_users_in_user_directory: + # Show the user in the directory again by setting them to active + # We get the profile handler here so that we don't cause a cyclic dependency + # at startup. + # FIXME: this doesn't work with the new account validity module stuff, in + # that this code is only run when no account validity module implements the + # on_legacy_renew callback - which synapse-email-account-validity does. + await self.hs.get_profile_handler().set_active( + [UserID.from_string(user_id)], True, True + ) + return expiration_ts + + async def _mark_expired_users_as_inactive(self) -> None: + """Iterate over active, expired users. Mark them as inactive in order to hide them + from the user directory. 
+ """ + # Get active, expired users + active_expired_users = await self.store.get_expired_users() + + # Mark each as non-active + # We get the profile handler here so that we don't cause a cyclic dependency at + # startup. + await self.hs.get_profile_handler().set_active( + active_expired_users, False, True + ) diff --git a/synapse/handlers/deactivate_account.py b/synapse/handlers/deactivate_account.py index bee62cf360..4f3bf4ecc2 100644 --- a/synapse/handlers/deactivate_account.py +++ b/synapse/handlers/deactivate_account.py @@ -124,6 +124,11 @@ class DeactivateAccountHandler: # associated with devices, e.g. email pushers. await self.store.delete_all_pushers_for_user(user_id) + # Set the user as no longer active. This will prevent their profile + # from being replicated. + user = UserID.from_string(user_id) + await self._profile_handler.set_active([user], False, False) + # Add the user to a table of users pending deactivation (ie. # removal from all the rooms they're a member of) await self.store.add_user_pending_deactivation(user_id) diff --git a/synapse/handlers/identity.py b/synapse/handlers/identity.py index c83eaea359..c57d03f28a 100644 --- a/synapse/handlers/identity.py +++ b/synapse/handlers/identity.py @@ -1,6 +1,6 @@ # Copyright 2015, 2016 OpenMarket Ltd # Copyright 2017 Vector Creations Ltd -# Copyright 2018 New Vector Ltd +# Copyright 2018, 2019 New Vector Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
@@ -20,9 +20,11 @@ import urllib.parse from typing import TYPE_CHECKING, Awaitable, Callable, Dict, List, Optional, Tuple from synapse.api.errors import ( + AuthError, CodeMessageException, Codes, HttpResponseException, + ProxiedRequestError, SynapseError, ) from synapse.api.ratelimiting import Ratelimiter @@ -44,8 +46,6 @@ if TYPE_CHECKING: logger = logging.getLogger(__name__) -id_server_scheme = "https://" - class IdentityHandler: def __init__(self, hs: "HomeServer"): @@ -61,6 +61,11 @@ class IdentityHandler: self.federation_http_client = hs.get_federation_http_client() self.hs = hs + self.rewrite_identity_server_urls = ( + hs.config.registration.rewrite_identity_server_urls + ) + self._enable_lookup = hs.config.registration.enable_3pid_lookup + self._web_client_location = hs.config.email.invite_client_location # Ratelimiters for `/requestToken` endpoints. @@ -99,14 +104,14 @@ class IdentityHandler: ) async def threepid_from_creds( - self, id_server: str, creds: Dict[str, str] + self, id_server_url: str, creds: Dict[str, str] ) -> Optional[JsonDict]: """ Retrieve and validate a threepid identifier from a "credentials" dictionary against a given identity server Args: - id_server: The identity server to validate 3PIDs against. Must be a + id_server_url: The identity server to validate 3PIDs against. Must be a complete URL including the protocol (http(s)://) creds: Dictionary containing the following keys: * client_secret|clientSecret: A unique secret str provided by the client @@ -131,7 +136,14 @@ class IdentityHandler: query_params = {"sid": session_id, "client_secret": client_secret} - url = id_server + "/_matrix/identity/api/v1/3pid/getValidated3pid" + # if we have a rewrite rule set for the identity server, + # apply it now. 
+ id_server_url = self.rewrite_id_server_url(id_server_url) + + url = "%s%s" % ( + id_server_url, + "/_matrix/identity/api/v1/3pid/getValidated3pid", + ) try: data = await self.http_client.get_json(url, query_params) @@ -140,7 +152,7 @@ class IdentityHandler: except HttpResponseException as e: logger.info( "%s returned %i for threepid validation for: %s", - id_server, + id_server_url, e.code, creds, ) @@ -154,7 +166,7 @@ class IdentityHandler: if "medium" in data: return data - logger.info("%s reported non-validated threepid: %s", id_server, creds) + logger.info("%s reported non-validated threepid: %s", id_server_url, creds) return None async def bind_threepid( @@ -197,14 +209,19 @@ class IdentityHandler: "id_server must be a valid hostname with optional port and path components", ) + # if we have a rewrite rule set for the identity server, + # apply it now, but only for sending the request (not + # storing in the database). + id_server_url = self.rewrite_id_server_url(id_server, add_https=True) + # Decide which API endpoint URLs to use headers = {} bind_data = {"sid": sid, "client_secret": client_secret, "mxid": mxid} if use_v2: - bind_url = "https://%s/_matrix/identity/v2/3pid/bind" % (id_server,) + bind_url = "%s/_matrix/identity/v2/3pid/bind" % (id_server_url,) headers["Authorization"] = create_id_access_token_header(id_access_token) # type: ignore else: - bind_url = "https://%s/_matrix/identity/api/v1/3pid/bind" % (id_server,) + bind_url = "%s/_matrix/identity/api/v1/3pid/bind" % (id_server_url,) try: # Use the blacklisting http client as this call is only to identity servers @@ -311,6 +328,7 @@ class IdentityHandler: # we abuse the federation http client to sign the request, but we have to send it # using the normal http client since we don't want the SRV lookup and want normal # 'browser-like' HTTPS. 
+ url_bytes = "/_matrix/identity/api/v1/3pid/unbind".encode("ascii") auth_headers = self.federation_http_client.build_auth_headers( destination=None, method=b"POST", @@ -320,6 +338,18 @@ class IdentityHandler: ) headers = {b"Authorization": auth_headers} + # if we have a rewrite rule set for the identity server, + # apply it now. + # + # Note that destination_is has to be the real id_server, not + # the server we connect to. + id_server_url = self.rewrite_id_server_url(id_server, add_https=True) + + if self.hs.config.registration.bind_new_user_emails_to_sydent: + id_server_url = self.hs.config.registration.bind_new_user_emails_to_sydent + + url = "%s/_matrix/identity/api/v1/3pid/unbind" % (id_server_url,) + try: # Use the blacklisting http client as this call is only to identity servers # provided by a client @@ -434,9 +464,28 @@ class IdentityHandler: return session_id + def rewrite_id_server_url(self, url: str, add_https: bool = False) -> str: + """Given an identity server URL, optionally add a protocol scheme + before rewriting it according to the rewrite_identity_server_urls + config option + + Adds https:// to the URL if specified, then tries to rewrite the + url. Returns either the rewritten URL or the URL with optional + protocol scheme additions. + """ + rewritten_url = url + if add_https: + rewritten_url = "https://" + rewritten_url + + rewritten_url = self.rewrite_identity_server_urls.get( + rewritten_url, rewritten_url + ) + logger.debug("Rewriting identity server rule from %s to %s", url, rewritten_url) + return rewritten_url + async def requestEmailToken( self, - id_server: str, + id_server_url: str, email: str, client_secret: str, send_attempt: int, @@ -447,7 +496,7 @@ class IdentityHandler: validation. 
Args: - id_server: The identity server to proxy to + id_server_url: The identity server to proxy to email: The email to send the message to client_secret: The unique client_secret sends by the user send_attempt: Which attempt this is @@ -461,12 +510,18 @@ class IdentityHandler: "client_secret": client_secret, "send_attempt": send_attempt, } + + # if we have a rewrite rule set for the identity server, + # apply it now. + id_server_url = self.rewrite_id_server_url(id_server_url) + if next_link: params["next_link"] = next_link try: data = await self.http_client.post_json_get_json( - id_server + "/_matrix/identity/api/v1/validate/email/requestToken", + "%s/_matrix/identity/api/v1/validate/email/requestToken" + % (id_server_url,), params, ) return data @@ -478,7 +533,7 @@ class IdentityHandler: async def requestMsisdnToken( self, - id_server: str, + id_server_url: str, country: str, phone_number: str, client_secret: str, @@ -489,7 +544,7 @@ class IdentityHandler: Request an external server send an SMS message on our behalf for the purposes of threepid validation. Args: - id_server: The identity server to proxy to + id_server_url: The identity server to proxy to country: The country code of the phone number phone_number: The number to send the message to client_secret: The unique client_secret sends by the user @@ -508,9 +563,13 @@ class IdentityHandler: if next_link: params["next_link"] = next_link + # if we have a rewrite rule set for the identity server, + # apply it now. 
+ id_server_url = self.rewrite_id_server_url(id_server_url) try: data = await self.http_client.post_json_get_json( - id_server + "/_matrix/identity/api/v1/validate/msisdn/requestToken", + "%s/_matrix/identity/api/v1/validate/msisdn/requestToken" + % (id_server_url,), params, ) except HttpResponseException as e: @@ -609,6 +668,86 @@ class IdentityHandler: logger.warning("Error contacting msisdn account_threepid_delegate: %s", e) raise SynapseError(400, "Error contacting the identity server") + # TODO: The following two methods are used for proxying IS requests using + # the CS API. They should be consolidated with those in RoomMemberHandler + # https://github.com/matrix-org/synapse-dinsic/issues/25 + + async def proxy_lookup_3pid( + self, id_server: str, medium: str, address: str + ) -> JsonDict: + """Looks up a 3pid in the passed identity server. + + Args: + id_server: The server name (including port, if required) + of the identity server to use. + medium: The type of the third party identifier (e.g. "email"). + address: The third party identifier (e.g. "foo@example.com"). + + Returns: + The result of the lookup. 
See + https://matrix.org/docs/spec/identity_service/r0.1.0.html#association-lookup + for details + """ + if not self._enable_lookup: + raise AuthError( + 403, "Looking up third-party identifiers is denied from this server" + ) + + id_server_url = self.rewrite_id_server_url(id_server, add_https=True) + + try: + data = await self.http_client.get_json( + "%s/_matrix/identity/api/v1/lookup" % (id_server_url,), + {"medium": medium, "address": address}, + ) + + except HttpResponseException as e: + logger.info("Proxied lookup failed: %r", e) + raise e.to_synapse_error() + except IOError as e: + logger.info("Failed to contact %s: %s", id_server, e) + raise ProxiedRequestError(503, "Failed to contact identity server") + + return data + + async def proxy_bulk_lookup_3pid( + self, id_server: str, threepids: List[List[str]] + ) -> JsonDict: + """Looks up given 3pids in the passed identity server. + + Args: + id_server: The server name (including port, if required) + of the identity server to use. + threepids: The third party identifiers to lookup, as + a list of 2-string sized lists ([medium, address]). + + Returns: + The result of the lookup. 
See + https://matrix.org/docs/spec/identity_service/r0.1.0.html#association-lookup + for details + """ + if not self._enable_lookup: + raise AuthError( + 403, "Looking up third-party identifiers is denied from this server" + ) + + id_server_url = self.rewrite_id_server_url(id_server, add_https=True) + + try: + data = await self.http_client.post_json_get_json( + "%s/_matrix/identity/api/v1/bulk_lookup" % (id_server_url,), + {"threepids": threepids}, + ) + + except HttpResponseException as e: + logger.info("Proxied lookup failed: %r", e) + raise e.to_synapse_error() + except IOError as e: + logger.info("Failed to contact %s: %s", id_server, e) + raise ProxiedRequestError(503, "Failed to contact identity server") + + return data + async def lookup_3pid( self, id_server: str, @@ -629,10 +768,13 @@ class IdentityHandler: Returns: the matrix ID of the 3pid, or None if it is not recognized. """ + # Rewrite id_server URL if necessary + id_server_url = self.rewrite_id_server_url(id_server, add_https=True) + if id_access_token is not None: try: results = await self._lookup_3pid_v2( - id_server, id_access_token, medium, address + id_server_url, id_access_token, medium, address ) return results @@ -650,16 +792,17 @@ class IdentityHandler: logger.warning("Error when looking up hashing details: %s", e) return None - return await self._lookup_3pid_v1(id_server, medium, address) + return await self._lookup_3pid_v1(id_server, id_server_url, medium, address) async def _lookup_3pid_v1( - self, id_server: str, medium: str, address: str + self, id_server: str, id_server_url: str, medium: str, address: str ) -> Optional[str]: """Looks up a 3pid in the passed identity server using v1 lookup. Args: id_server: The server name (including port, if required) of the identity server to use. + id_server_url: The actual, reachable domain of the id server medium: The type of the third party identifier (e.g. "email"). address: The third party identifier (e.g. "foo@example.com"). 
@@ -667,8 +810,8 @@ class IdentityHandler: the matrix ID of the 3pid, or None if it is not recognized. """ try: - data = await self.blacklisting_http_client.get_json( - "%s%s/_matrix/identity/api/v1/lookup" % (id_server_scheme, id_server), + data = await self.http_client.get_json( + "%s/_matrix/identity/api/v1/lookup" % (id_server_url,), {"medium": medium, "address": address}, ) @@ -685,13 +828,12 @@ class IdentityHandler: return None async def _lookup_3pid_v2( - self, id_server: str, id_access_token: str, medium: str, address: str + self, id_server_url: str, id_access_token: str, medium: str, address: str ) -> Optional[str]: """Looks up a 3pid in the passed identity server using v2 lookup. Args: - id_server: The server name (including port, if required) - of the identity server to use. + id_server_url: The protocol scheme and domain of the id server id_access_token: The access token to authenticate to the identity server with medium: The type of the third party identifier (e.g. "email"). address: The third party identifier (e.g. "foo@example.com"). 
@@ -701,8 +843,8 @@ class IdentityHandler: """ # Check what hashing details are supported by this identity server try: - hash_details = await self.blacklisting_http_client.get_json( - "%s%s/_matrix/identity/v2/hash_details" % (id_server_scheme, id_server), + hash_details = await self.http_client.get_json( + "%s/_matrix/identity/v2/hash_details" % (id_server_url,), {"access_token": id_access_token}, ) except RequestTimedOutError: @@ -710,15 +852,14 @@ class IdentityHandler: if not isinstance(hash_details, dict): logger.warning( - "Got non-dict object when checking hash details of %s%s: %s", - id_server_scheme, - id_server, + "Got non-dict object when checking hash details of %s: %s", + id_server_url, hash_details, ) raise SynapseError( 400, - "Non-dict object from %s%s during v2 hash_details request: %s" - % (id_server_scheme, id_server, hash_details), + "Non-dict object from %s during v2 hash_details request: %s" + % (id_server_url, hash_details), ) # Extract information from hash_details @@ -732,8 +873,8 @@ class IdentityHandler: ): raise SynapseError( 400, - "Invalid hash details received from identity server %s%s: %s" - % (id_server_scheme, id_server, hash_details), + "Invalid hash details received from identity server %s: %s" + % (id_server_url, hash_details), ) # Check if any of the supported lookup algorithms are present @@ -755,7 +896,7 @@ class IdentityHandler: else: logger.warning( "None of the provided lookup algorithms of %s are supported: %s", - id_server, + id_server_url, supported_lookup_algorithms, ) raise SynapseError( @@ -768,8 +909,8 @@ class IdentityHandler: headers = {"Authorization": create_id_access_token_header(id_access_token)} try: - lookup_results = await self.blacklisting_http_client.post_json_get_json( - "%s%s/_matrix/identity/v2/lookup" % (id_server_scheme, id_server), + lookup_results = await self.http_client.post_json_get_json( + "%s/_matrix/identity/v2/lookup" % (id_server_url,), { "addresses": [lookup_value], "algorithm": 
lookup_algorithm, @@ -865,15 +1006,17 @@ class IdentityHandler: if self._web_client_location: invite_config["org.matrix.web_client_location"] = self._web_client_location + # Rewrite the identity server URL if necessary + id_server_url = self.rewrite_id_server_url(id_server, add_https=True) + # Add the identity service access token to the JSON body and use the v2 # Identity Service endpoints if id_access_token is present data = None - base_url = "%s%s/_matrix/identity" % (id_server_scheme, id_server) + base_url = "%s/_matrix/identity" % (id_server_url,) if id_access_token: - key_validity_url = "%s%s/_matrix/identity/v2/pubkey/isvalid" % ( - id_server_scheme, - id_server, + key_validity_url = "%s/_matrix/identity/v2/pubkey/isvalid" % ( + id_server_url, ) # Attempt a v2 lookup @@ -892,9 +1035,8 @@ class IdentityHandler: raise e if data is None: - key_validity_url = "%s%s/_matrix/identity/api/v1/pubkey/isvalid" % ( - id_server_scheme, - id_server, + key_validity_url = "%s/_matrix/identity/api/v1/pubkey/isvalid" % ( + id_server_url, ) url = base_url + "/api/v1/store-invite" @@ -906,9 +1048,8 @@ class IdentityHandler: raise SynapseError(500, "Timed out contacting identity server") except HttpResponseException as e: logger.warning( - "Error trying to call /store-invite on %s%s: %s", - id_server_scheme, - id_server, + "Error trying to call /store-invite on %s: %s", + id_server_url, e, ) @@ -922,10 +1063,9 @@ class IdentityHandler: ) except HttpResponseException as e: logger.warning( - "Error calling /store-invite on %s%s with fallback " + "Error calling /store-invite on %s with fallback " "encoding: %s", - id_server_scheme, - id_server, + id_server_url, e, ) raise e @@ -946,6 +1086,48 @@ class IdentityHandler: display_name = data["display_name"] return token, public_keys, fallback_public_key, display_name + async def bind_email_using_internal_sydent_api( + self, + id_server_url: str, + email: str, + user_id: str, + ) -> None: + """Bind an email to a fully qualified user ID 
using the internal API of an + instance of Sydent. + + Args: + id_server_url: The URL of the Sydent instance + email: The email address to bind + user_id: The user ID to bind the email to + + Raises: + HttpResponseException: On a non-2xx HTTP response. + """ + # Extract the domain name from the IS URL as we store IS domains instead of URLs + id_server = urllib.parse.urlparse(id_server_url).hostname + if not id_server: + # We were unable to determine the hostname, bail out + return + + # id_server_url is assumed to have no trailing slashes + url = id_server_url + "/_matrix/identity/internal/bind" + body = { + "address": email, + "medium": "email", + "mxid": user_id, + } + + # Bind the threepid + await self.http_client.post_json_get_json(url, body) + + # Remember where we bound the threepid + await self.store.add_user_bound_threepid( + user_id=user_id, + medium="email", + address=email, + id_server=id_server, + ) + def create_id_access_token_header(id_access_token: str) -> List[str]: """Create an Authorization header for passing to SimpleHttpClient as the header value diff --git a/synapse/handlers/profile.py b/synapse/handlers/profile.py index 6b5a6ded8b..aa47354a6a 100644 --- a/synapse/handlers/profile.py +++ b/synapse/handlers/profile.py @@ -1,4 +1,5 @@ # Copyright 2014-2016 OpenMarket Ltd +# Copyright 2018 New Vector Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -13,7 +14,12 @@ # limitations under the License. 
import logging import random -from typing import TYPE_CHECKING, Optional +from typing import TYPE_CHECKING, List, Optional + +from signedjson.sign import sign_json + +from twisted.internet import reactor +from twisted.internet.defer import Deferred from synapse.api.errors import ( AuthError, @@ -23,7 +29,11 @@ from synapse.api.errors import ( StoreError, SynapseError, ) -from synapse.metrics.background_process_metrics import wrap_as_background_process +from synapse.logging.context import run_in_background +from synapse.metrics.background_process_metrics import ( + run_as_background_process, + wrap_as_background_process, +) from synapse.types import ( JsonDict, Requester, @@ -51,6 +61,8 @@ class ProfileHandler: PROFILE_UPDATE_MS = 60 * 1000 PROFILE_UPDATE_EVERY_MS = 24 * 60 * 60 * 1000 + PROFILE_REPLICATE_INTERVAL = 2 * 60 * 1000 + def __init__(self, hs: "HomeServer"): self.store = hs.get_datastore() self.clock = hs.get_clock() @@ -64,11 +76,102 @@ class ProfileHandler: self.user_directory_handler = hs.get_user_directory_handler() self.request_ratelimiter = hs.get_request_ratelimiter() + self.http_client = hs.get_simple_http_client() + + self.max_avatar_size = hs.config.media.max_avatar_size + self.allowed_avatar_mimetypes = hs.config.media.allowed_avatar_mimetypes + self.replicate_user_profiles_to = ( + hs.config.registration.replicate_user_profiles_to + ) + if hs.config.worker.run_background_tasks: self.clock.looping_call( self._update_remote_profile_cache, self.PROFILE_UPDATE_MS ) + if len(self.replicate_user_profiles_to) > 0: + reactor.callWhenRunning(self._do_assign_profile_replication_batches) # type: ignore + reactor.callWhenRunning(self._start_replicate_profiles) # type: ignore + # Add a looping call to replicate_profiles: this handles retries + # if the replication is unsuccessful when the user updated their + # profile. 
+ self.clock.looping_call( + self._start_replicate_profiles, self.PROFILE_REPLICATE_INTERVAL + ) + + def _do_assign_profile_replication_batches(self) -> Deferred: + return run_as_background_process( + "_assign_profile_replication_batches", + self._assign_profile_replication_batches, + ) + + def _start_replicate_profiles(self) -> Deferred: + return run_as_background_process( + "_replicate_profiles", self._replicate_profiles + ) + + async def _assign_profile_replication_batches(self) -> None: + """If no profile replication has been done yet, allocate replication batch + numbers to each profile to start the replication process. + """ + logger.info("Assigning profile batch numbers...") + total = 0 + while True: + assigned = await self.store.assign_profile_batch() + total += assigned + if assigned == 0: + break + logger.info("Assigned %d profile batch numbers", total) + + async def _replicate_profiles(self) -> None: + """If any profile data has been updated and not pushed to the replication targets, + replicate it. 
+ """ + host_batches = await self.store.get_replication_hosts() + latest_batch = await self.store.get_latest_profile_replication_batch_number() + if latest_batch is None: + latest_batch = -1 + for repl_host in self.replicate_user_profiles_to: + if repl_host not in host_batches: + host_batches[repl_host] = -1 + try: + for i in range(host_batches[repl_host] + 1, latest_batch + 1): + await self._replicate_host_profile_batch(repl_host, i) + except Exception: + logger.exception( + "Exception while replicating to %s: aborting for now", repl_host + ) + + async def _replicate_host_profile_batch(self, host: str, batchnum: int) -> None: + logger.info("Replicating profile batch %d to %s", batchnum, host) + batch_rows = await self.store.get_profile_batch(batchnum) + batch = { + UserID(r["user_id"], self.hs.hostname).to_string(): ( + {"display_name": r["displayname"], "avatar_url": r["avatar_url"]} + if r["active"] + else None + ) + for r in batch_rows + } + + url = "https://%s/_matrix/identity/api/v1/replicate_profiles" % (host,) + body = {"batchnum": batchnum, "batch": batch, "origin_server": self.hs.hostname} + signed_body = sign_json( + body, self.hs.hostname, self.hs.config.key.signing_key[0] + ) + try: + await self.http_client.post_json_get_json(url, signed_body) + await self.store.update_replication_batch_for_host(host, batchnum) + logger.info( + "Successfully replicated profile batch %d to %s", batchnum, host + ) + except Exception: + # This will get retried when the looping call next comes around + logger.exception( + "Failed to replicate profile batch %d to %s", batchnum, host + ) + raise + async def get_profile(self, user_id: str) -> JsonDict: target_user = UserID.from_string(user_id) @@ -211,8 +314,16 @@ class ProfileHandler: authenticated_entity=requester.authenticated_entity, ) + if len(self.replicate_user_profiles_to) > 0: + cur_batchnum = ( + await self.store.get_latest_profile_replication_batch_number() + ) + new_batchnum = 0 if cur_batchnum is None else 
cur_batchnum + 1 + else: + new_batchnum = None + await self.store.set_profile_displayname( - target_user.localpart, displayname_to_set + target_user.localpart, displayname_to_set, new_batchnum ) profile = await self.store.get_profileinfo(target_user.localpart) @@ -220,7 +331,53 @@ class ProfileHandler: target_user.to_string(), profile ) - await self._update_join_states(requester, target_user) + # Don't ratelimit when the admin makes the change. + # FIXME: this is because we call this function on registration to update DINUM's + # custom userdir. + await self._update_join_states(requester, target_user, ratelimit=not by_admin) + + # start a profile replication push + run_in_background(self._replicate_profiles) + + async def set_active( + self, + users: List[UserID], + active: bool, + hide: bool, + ) -> None: + """ + Sets the 'active' flag on a set of user profiles. If set to false, the + accounts are considered deactivated or hidden. + + If 'hide' is true, then we interpret active=False as a request to try to + hide the users rather than deactivating them. This means withholding the + profiles from replication (and mark it as inactive) rather than clearing + the profile from the HS DB. + + Note that unlike set_displayname and set_avatar_url, this does *not* + perform authorization checks! This is because the only place it's used + currently is in account deactivation where we've already done these + checks anyway. + + Args: + users: The users to modify + active: Whether to set the user to active or inactive + hide: Whether to hide the user (withold from replication). 
If + False and active is False, user will have their profile + erased + """ + if len(self.replicate_user_profiles_to) > 0: + cur_batchnum = ( + await self.store.get_latest_profile_replication_batch_number() + ) + new_batchnum = 0 if cur_batchnum is None else cur_batchnum + 1 + else: + new_batchnum = None + + await self.store.set_profiles_active(users, active, hide, new_batchnum) + + # start a profile replication push + run_in_background(self._replicate_profiles) async def get_avatar_url(self, target_user: UserID) -> Optional[str]: if self.hs.is_mine(target_user): @@ -290,14 +447,56 @@ class ProfileHandler: if new_avatar_url == "": avatar_url_to_set = None + # Enforce a max avatar size if one is defined + if avatar_url_to_set and ( + self.max_avatar_size or self.allowed_avatar_mimetypes + ): + media_id = self._validate_and_parse_media_id_from_avatar_url( + avatar_url_to_set + ) + + # Check that this media exists locally + media_info = await self.store.get_local_media(media_id) + if not media_info: + raise SynapseError( + 400, "Unknown media id supplied", errcode=Codes.NOT_FOUND + ) + + # Ensure avatar does not exceed max allowed avatar size + media_size = media_info["media_length"] + if self.max_avatar_size and media_size > self.max_avatar_size: + raise SynapseError( + 400, + "Avatars must be less than %s bytes in size" + % (self.max_avatar_size,), + errcode=Codes.TOO_LARGE, + ) + + # Ensure the avatar's file type is allowed + if ( + self.allowed_avatar_mimetypes + and media_info["media_type"] not in self.allowed_avatar_mimetypes + ): + raise SynapseError( + 400, "Avatar file type '%s' not allowed" % media_info["media_type"] + ) + # Same like set_displayname if by_admin: requester = create_requester( target_user, authenticated_entity=requester.authenticated_entity ) + if len(self.replicate_user_profiles_to) > 0: + cur_batchnum = ( + await self.store.get_latest_profile_replication_batch_number() + ) + new_batchnum = 0 if cur_batchnum is None else cur_batchnum + 1 + 
else: + new_batchnum = None + await self.store.set_profile_avatar_url( - target_user.localpart, avatar_url_to_set + target_user.localpart, avatar_url_to_set, new_batchnum ) profile = await self.store.get_profileinfo(target_user.localpart) @@ -307,6 +506,23 @@ class ProfileHandler: await self._update_join_states(requester, target_user) + # start a profile replication push + run_in_background(self._replicate_profiles) + + def _validate_and_parse_media_id_from_avatar_url(self, mxc: str) -> str: + """Validate and parse a provided avatar url and return the local media id + + Args: + mxc: A mxc URL + + Returns: + The ID of the media + """ + avatar_pieces = mxc.split("/") + if len(avatar_pieces) != 4 or avatar_pieces[0] != "mxc:": + raise SynapseError(400, "Invalid avatar URL '%s' supplied" % mxc) + return avatar_pieces[-1] + async def on_profile_query(self, args: JsonDict) -> JsonDict: """Handles federation profile query requests.""" @@ -342,12 +558,16 @@ class ProfileHandler: return response async def _update_join_states( - self, requester: Requester, target_user: UserID + self, + requester: Requester, + target_user: UserID, + ratelimit: bool = True, ) -> None: if not self.hs.is_mine(target_user): return - await self.request_ratelimiter.ratelimit(requester) + if ratelimit: + await self.request_ratelimiter.ratelimit(requester) # Do not actually update the room state for shadow-banned users. 
if requester.shadow_banned: diff --git a/synapse/handlers/register.py b/synapse/handlers/register.py index 448a36108e..eea041050a 100644 --- a/synapse/handlers/register.py +++ b/synapse/handlers/register.py @@ -92,6 +92,7 @@ class RegistrationHandler: self._auth_handler = hs.get_auth_handler() self.profile_handler = hs.get_profile_handler() self.user_directory_handler = hs.get_user_directory_handler() + self.http_client = hs.get_simple_http_client() self.identity_handler = self.hs.get_identity_handler() self.ratelimiter = hs.get_registration_ratelimiter() self.macaroon_gen = hs.get_macaroon_generator() @@ -102,6 +103,10 @@ class RegistrationHandler: self.spam_checker = hs.get_spam_checker() + self._show_in_user_directory = ( + self.hs.config.server.show_users_in_user_directory + ) + if hs.config.worker.worker_app: self._register_client = ReplicationRegisterServlet.make_client(hs) self._register_device_client = RegisterDeviceReplicationServlet.make_client( @@ -170,6 +175,8 @@ class RegistrationHandler: raise SynapseError( 400, "User ID already taken.", errcode=Codes.USER_IN_USE ) + + # Retrieve guest user information from provided access token user_data = await self.auth.get_user_by_access_token(guest_access_token) if ( not user_data.is_guest @@ -297,6 +304,16 @@ class RegistrationHandler: shadow_banned=shadow_banned, ) + if default_display_name: + requester = create_requester(user) + # FIXME: this function call is DINUM-specific code to update DINUM's + # custom Sydent-powered userdir, and needed some custom changes to + # ignore the ratelimiter. On mainline, we don't need to call this + # function. 
+ await self.profile_handler.set_displayname( + user, requester, default_display_name, by_admin=True + ) + profile = await self.store.get_profileinfo(localpart) await self.user_directory_handler.handle_local_profile_change( user_id, profile @@ -305,8 +322,6 @@ class RegistrationHandler: else: # autogen a sequential user ID fail_count = 0 - # If a default display name is not given, generate one. - generate_display_name = default_display_name is None # This breaks on successful registration *or* errors after 10 failures. while True: # Fail after being unable to find a suitable ID a few times @@ -317,7 +332,7 @@ class RegistrationHandler: user = UserID(localpart, self.hs.hostname) user_id = user.to_string() self.check_user_id_not_appservice_exclusive(user_id) - if generate_display_name: + if default_display_name is None: default_display_name = localpart try: await self.register_with_store( @@ -329,6 +344,11 @@ class RegistrationHandler: shadow_banned=shadow_banned, ) + requester = create_requester(user) + await self.profile_handler.set_displayname( + user, requester, default_display_name, by_admin=True + ) + # Successfully registered break except SynapseError: @@ -371,7 +391,15 @@ class RegistrationHandler: } # Bind email to new account - await self._register_email_threepid(user_id, threepid_dict, None) + await self.register_email_threepid(user_id, threepid_dict, None) + + # Prevent the new user from showing up in the user directory if the server + # mandates it. 
+ if not self._show_in_user_directory: + await self.store.add_account_data_for_user( + user_id, "im.vector.hide_profile", {"hide_profile": True} + ) + await self.profile_handler.set_active([user], False, True) return user_id @@ -605,6 +633,7 @@ class RegistrationHandler: await self._auto_join_rooms(user_id) async def appservice_register(self, user_localpart: str, as_token: str) -> str: + # FIXME: this should be factored out and merged with normal register() user = UserID(user_localpart, self.hs.hostname) user_id = user.to_string() service = self.store.get_app_service_by_token(as_token) @@ -859,6 +888,7 @@ class RegistrationHandler: if auth_result and LoginType.EMAIL_IDENTITY in auth_result: threepid = auth_result[LoginType.EMAIL_IDENTITY] + # Necessary due to auth checks prior to the threepid being # written to the db if is_threepid_reserved( @@ -866,7 +896,34 @@ class RegistrationHandler: ): await self.store.upsert_monthly_active_user(user_id) - await self._register_email_threepid(user_id, threepid, access_token) + await self.register_email_threepid(user_id, threepid, access_token) + + if self.hs.config.registration.bind_new_user_emails_to_sydent: + # Attempt to call Sydent's internal bind API on the given identity server + # to bind this threepid + id_server_url = ( + self.hs.config.registration.bind_new_user_emails_to_sydent + ) + + logger.debug( + "Attempting the bind email of %s to identity server: %s using " + "internal Sydent bind API.", + user_id, + self.hs.config.registration.bind_new_user_emails_to_sydent, + ) + + try: + await self.identity_handler.bind_email_using_internal_sydent_api( + id_server_url, threepid["address"], user_id + ) + except Exception as e: + logger.warning( + "Failed to bind email of '%s' to Sydent instance '%s' ", + "using Sydent internal bind API: %s", + user_id, + id_server_url, + e, + ) if auth_result and LoginType.MSISDN in auth_result: threepid = auth_result[LoginType.MSISDN] @@ -888,7 +945,7 @@ class RegistrationHandler: await 
self.store.user_set_consent_version(user_id, consent_version) await self.post_consent_actions(user_id) - async def _register_email_threepid( + async def register_email_threepid( self, user_id: str, threepid: dict, token: Optional[str] ) -> None: """Add an email address as a 3pid identifier diff --git a/synapse/handlers/room_member.py b/synapse/handlers/room_member.py index a6dbff637f..b81180af01 100644 --- a/synapse/handlers/room_member.py +++ b/synapse/handlers/room_member.py @@ -1298,6 +1298,16 @@ class RoomMemberHandler(metaclass=abc.ABCMeta): Codes.FORBIDDEN, ) + can_invite = await self.third_party_event_rules.check_threepid_can_be_invited( + medium, address, room_id + ) + if not can_invite: + raise SynapseError( + 403, + "This third-party identifier can not be invited in this room", + Codes.FORBIDDEN, + ) + if not self._enable_lookup: raise SynapseError( 403, "Looking up third-party identifiers is denied from this server" diff --git a/synapse/handlers/set_password.py b/synapse/handlers/set_password.py index 706ad72761..c57d08164f 100644 --- a/synapse/handlers/set_password.py +++ b/synapse/handlers/set_password.py @@ -1,4 +1,5 @@ -# Copyright 2017 New Vector Ltd +# -*- coding: utf-8 -*- +# Copyright 2021 The Matrix.org Foundation C.I.C. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/synapse/http/servlet.py b/synapse/http/servlet.py index 91ba93372c..d1e1f1b1d8 100644 --- a/synapse/http/servlet.py +++ b/synapse/http/servlet.py @@ -22,6 +22,7 @@ from typing import ( Optional, Sequence, Tuple, + Union, overload, ) @@ -649,7 +650,9 @@ def parse_json_object_from_request( return content -def assert_params_in_dict(body: JsonDict, required: Iterable[str]) -> None: +def assert_params_in_dict( + body: JsonDict, required: Iterable[Union[str, bytes]] +) -> None: absent = [] for k in required: if k not in body: diff --git a/synapse/push/baserules.py b/synapse/push/baserules.py index 6211506990..4d284de133 100644 --- a/synapse/push/baserules.py +++ b/synapse/push/baserules.py @@ -495,7 +495,11 @@ BASE_APPEND_UNDERRIDE_RULES = [ "_id": "_message", } ], - "actions": ["notify", {"set_tweak": "highlight", "value": False}], + "actions": [ + "notify", + {"set_tweak": "sound", "value": "default"}, + {"set_tweak": "highlight", "value": False}, + ], }, # XXX: this is going to fire for events which aren't m.room.messages # but are encrypted (e.g. m.call.*)... @@ -509,7 +513,11 @@ BASE_APPEND_UNDERRIDE_RULES = [ "_id": "_encrypted", } ], - "actions": ["notify", {"set_tweak": "highlight", "value": False}], + "actions": [ + "notify", + {"set_tweak": "sound", "value": "default"}, + {"set_tweak": "highlight", "value": False}, + ], }, { "rule_id": "global/underride/.im.vector.jitsi", diff --git a/synapse/res/templates/sso_auth_bad_user.html b/synapse/res/templates/sso_auth_bad_user.html index da579ffe69..c061698a21 100644 --- a/synapse/res/templates/sso_auth_bad_user.html +++ b/synapse/res/templates/sso_auth_bad_user.html @@ -12,7 +12,7 @@ <header> <h1>That doesn't look right</h1> <p> - <strong>We were unable to validate your {{ server_name }} account</strong> + <strong>We were unable to validate your {{ server_name }} account</strong> via single sign‑on (SSO), because the SSO Identity Provider returned different details than when you logged in. 
</p> diff --git a/synapse/rest/__init__.py b/synapse/rest/__init__.py index cebdeecb81..5e57909478 100644 --- a/synapse/rest/__init__.py +++ b/synapse/rest/__init__.py @@ -123,6 +123,7 @@ class ClientRestResource(JsonResource): room_batch.register_servlets(hs, client_resource) capabilities.register_servlets(hs, client_resource) account_validity.register_servlets(hs, client_resource) + password_policy.register_servlets(hs, client_resource) relations.register_servlets(hs, client_resource) password_policy.register_servlets(hs, client_resource) knock.register_servlets(hs, client_resource) diff --git a/synapse/rest/client/account.py b/synapse/rest/client/account.py index 6b272658fc..2386bbb87d 100644 --- a/synapse/rest/client/account.py +++ b/synapse/rest/client/account.py @@ -1,6 +1,6 @@ # Copyright 2015, 2016 OpenMarket Ltd # Copyright 2017 Vector Creations Ltd -# Copyright 2018 New Vector Ltd +# Copyright 2018, 2019 New Vector Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -15,8 +15,9 @@ # limitations under the License. 
import logging import random +import re from http import HTTPStatus -from typing import TYPE_CHECKING, Optional, Tuple +from typing import TYPE_CHECKING, Any, Optional, Tuple from urllib.parse import urlparse from twisted.web.server import Request @@ -171,6 +172,7 @@ class PasswordRestServlet(RestServlet): self.datastore = self.hs.get_datastore() self.password_policy_handler = hs.get_password_policy_handler() self._set_password_handler = hs.get_set_password_handler() + self.http_client = hs.get_simple_http_client() @interactive_auth_handler async def on_POST(self, request: SynapseRequest) -> Tuple[int, JsonDict]: @@ -197,8 +199,12 @@ class PasswordRestServlet(RestServlet): requester = None if self.auth.has_access_token(request): requester = await self.auth.get_user_by_req(request) + # blindly trust ASes without UI-authing them try: - params, session_id = await self.auth_handler.validate_user_via_ui_auth( + ( + params, + session_id, + ) = await self.auth_handler.validate_user_via_ui_auth( requester, request, body, @@ -288,6 +294,9 @@ class PasswordRestServlet(RestServlet): return 200, {} + def on_OPTIONS(self, _: Any) -> Tuple[int, JsonDict]: + return 200, {} + class DeactivateAccountRestServlet(RestServlet): PATTERNS = client_patterns("/account/deactivate$") @@ -385,10 +394,10 @@ class EmailThreepidRequestTokenRestServlet(RestServlet): send_attempt = body["send_attempt"] next_link = body.get("next_link") # Optional param - if not check_3pid_allowed(self.hs, "email", email): + if not (await check_3pid_allowed(self.hs, "email", email)): raise SynapseError( 403, - "Your email domain is not authorized on this server", + "Your email is not authorized on this server", Codes.THREEPID_DENIED, ) @@ -468,7 +477,7 @@ class MsisdnThreepidRequestTokenRestServlet(RestServlet): msisdn = phone_number_to_msisdn(country, phone_number) - if not check_3pid_allowed(self.hs, "msisdn", msisdn): + if not (await check_3pid_allowed(self.hs, "msisdn", msisdn)): raise SynapseError( 403, 
"Account phone numbers are not authorized on this server", @@ -634,7 +643,7 @@ class ThreepidRestServlet(RestServlet): self.identity_handler = hs.get_identity_handler() self.auth = hs.get_auth() self.auth_handler = hs.get_auth_handler() - self.datastore = self.hs.get_datastore() + self.datastore = hs.get_datastore() async def on_GET(self, request: SynapseRequest) -> Tuple[int, JsonDict]: requester = await self.auth.get_user_by_req(request) @@ -690,6 +699,7 @@ class ThreepidAddRestServlet(RestServlet): self.identity_handler = hs.get_identity_handler() self.auth = hs.get_auth() self.auth_handler = hs.get_auth_handler() + self.http_client = hs.get_simple_http_client() @interactive_auth_handler async def on_POST(self, request: SynapseRequest) -> Tuple[int, JsonDict]: @@ -831,6 +841,65 @@ class ThreepidDeleteRestServlet(RestServlet): return 200, {"id_server_unbind_result": id_server_unbind_result} +class ThreepidLookupRestServlet(RestServlet): + PATTERNS = [re.compile("^/_matrix/client/unstable/account/3pid/lookup$")] + + def __init__(self, hs: "HomeServer") -> None: + super(ThreepidLookupRestServlet, self).__init__() + self.auth = hs.get_auth() + self.identity_handler = hs.get_identity_handler() + + async def on_GET(self, request: SynapseRequest) -> Tuple[int, JsonDict]: + """Proxy a /_matrix/identity/api/v1/lookup request to an identity + server + """ + await self.auth.get_user_by_req(request) + + # Verify query parameters + # Mypy will complain that request.args is of an incompatible type with JsonDict + # because Twisted is badly typed, so we just ignore it. 
+ query_params: JsonDict = request.args # type: ignore[assignment] + assert_params_in_dict(query_params, [b"medium", b"address", b"id_server"]) + + # Retrieve needed information from query parameters + medium = parse_string(request, "medium", required=True) + address = parse_string(request, "address", required=True) + id_server = parse_string(request, "id_server", required=True) + + # Proxy the request to the identity server. lookup_3pid handles checking + # if the lookup is allowed so we don't need to do it here. + ret = await self.identity_handler.proxy_lookup_3pid(id_server, medium, address) + + return 200, ret + + +class ThreepidBulkLookupRestServlet(RestServlet): + PATTERNS = [re.compile("^/_matrix/client/unstable/account/3pid/bulk_lookup$")] + + def __init__(self, hs: "HomeServer") -> None: + super(ThreepidBulkLookupRestServlet, self).__init__() + self.auth = hs.get_auth() + self.identity_handler = hs.get_identity_handler() + + async def on_POST(self, request: SynapseRequest) -> Tuple[int, JsonDict]: + """Proxy a /_matrix/identity/api/v1/bulk_lookup request to an identity + server + """ + await self.auth.get_user_by_req(request) + + body = parse_json_object_from_request(request) + + assert_params_in_dict(body, ["threepids", "id_server"]) + + # Proxy the request to the identity server. lookup_3pid handles checking + # if the lookup is allowed so we don't need to do it here. 
+ ret = await self.identity_handler.proxy_bulk_lookup_3pid( + body["id_server"], body["threepids"] + ) + + return 200, ret + + def assert_valid_next_link(hs: "HomeServer", next_link: str) -> None: """ Raises a SynapseError if a given next_link value is invalid @@ -907,4 +976,6 @@ def register_servlets(hs: "HomeServer", http_server: HttpServer) -> None: ThreepidBindRestServlet(hs).register(http_server) ThreepidUnbindRestServlet(hs).register(http_server) ThreepidDeleteRestServlet(hs).register(http_server) + ThreepidLookupRestServlet(hs).register(http_server) + ThreepidBulkLookupRestServlet(hs).register(http_server) WhoamiRestServlet(hs).register(http_server) diff --git a/synapse/rest/client/account_data.py b/synapse/rest/client/account_data.py index d1badbdf3b..273d4c5c04 100644 --- a/synapse/rest/client/account_data.py +++ b/synapse/rest/client/account_data.py @@ -19,7 +19,7 @@ from synapse.api.errors import AuthError, NotFoundError, SynapseError from synapse.http.server import HttpServer from synapse.http.servlet import RestServlet, parse_json_object_from_request from synapse.http.site import SynapseRequest -from synapse.types import JsonDict +from synapse.types import JsonDict, UserID from ._base import client_patterns @@ -44,6 +44,9 @@ class AccountDataServlet(RestServlet): self.auth = hs.get_auth() self.store = hs.get_datastore() self.handler = hs.get_account_data_handler() + self.notifier = hs.get_notifier() + self._is_worker = hs.config.worker_app is not None + self._profile_handler = hs.get_profile_handler() async def on_PUT( self, request: SynapseRequest, user_id: str, account_data_type: str @@ -54,7 +57,15 @@ class AccountDataServlet(RestServlet): body = parse_json_object_from_request(request) - await self.handler.add_account_data_for_user(user_id, account_data_type, body) + if account_data_type == "im.vector.hide_profile": + user = UserID.from_string(user_id) + hide_profile = body.get("hide_profile") + await self._profile_handler.set_active([user], not 
hide_profile, True) + + max_id = await self.handler.add_account_data_for_user( + user_id, account_data_type, body + ) + self.notifier.on_new_event("account_data_key", max_id, users=[user_id]) return 200, {} diff --git a/synapse/rest/client/profile.py b/synapse/rest/client/profile.py index c684636c0a..2ff35f4b84 100644 --- a/synapse/rest/client/profile.py +++ b/synapse/rest/client/profile.py @@ -79,6 +79,9 @@ class ProfileDisplaynameRestServlet(RestServlet): return 200, {} + def on_OPTIONS(self, request: SynapseRequest, user_id: str) -> Tuple[int, JsonDict]: + return 200, {} + class ProfileAvatarURLRestServlet(RestServlet): PATTERNS = client_patterns("/profile/(?P<user_id>[^/]*)/avatar_url", v1=True) @@ -131,6 +134,9 @@ class ProfileAvatarURLRestServlet(RestServlet): return 200, {} + def on_OPTIONS(self, request: SynapseRequest, user_id: str) -> Tuple[int, JsonDict]: + return 200, {} + class ProfileRestServlet(RestServlet): PATTERNS = client_patterns("/profile/(?P<user_id>[^/]*)", v1=True) diff --git a/synapse/rest/client/register.py b/synapse/rest/client/register.py index d2b11e39d9..d57d9a4f99 100644 --- a/synapse/rest/client/register.py +++ b/synapse/rest/client/register.py @@ -1,5 +1,5 @@ -# Copyright 2015 - 2016 OpenMarket Ltd -# Copyright 2017 Vector Creations Ltd +# -*- coding: utf-8 -*- +# Copyright 2021 The Matrix.org Foundation C.I.C. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -14,6 +14,7 @@ # limitations under the License. 
import logging import random +import re from typing import TYPE_CHECKING, List, Optional, Tuple from twisted.web.server import Request @@ -113,10 +114,10 @@ class EmailRegisterRequestTokenRestServlet(RestServlet): send_attempt = body["send_attempt"] next_link = body.get("next_link") # Optional param - if not check_3pid_allowed(self.hs, "email", email): + if not (await check_3pid_allowed(self.hs, "email", body["email"])): raise SynapseError( 403, - "Your email domain is not authorized to register on this server", + "Your email is not authorized to register on this server", Codes.THREEPID_DENIED, ) @@ -193,7 +194,9 @@ class MsisdnRegisterRequestTokenRestServlet(RestServlet): msisdn = phone_number_to_msisdn(country, phone_number) - if not check_3pid_allowed(self.hs, "msisdn", msisdn): + assert_valid_client_secret(body["client_secret"]) + + if not (await check_3pid_allowed(self.hs, "msisdn", msisdn)): raise SynapseError( 403, "Phone numbers are not authorized to register on this server", @@ -346,15 +349,9 @@ class UsernameAvailabilityRestServlet(RestServlet): 403, "Registration has been disabled", errcode=Codes.FORBIDDEN ) - ip = request.getClientIP() - with self.ratelimiter.ratelimit(ip) as wait_deferred: - await wait_deferred - - username = parse_string(request, "username", required=True) - - await self.registration_handler.check_username(username) - - return 200, {"available": True} + # We are not interested in logging in via a username in this deployment. + # Simply allow anything here as it won't be used later. + return 200, {"available": True} class RegistrationTokenValidityRestServlet(RestServlet): @@ -455,13 +452,22 @@ class RegisterRestServlet(RestServlet): else: should_issue_refresh_token = False - # Pull out the provided username and do basic sanity checks early since - # the auth layer will store these in sessions. + # We don't care about usernames for this deployment. 
In fact, the act + # of checking whether they exist already can leak metadata about + # which users are already registered. + # + # Usernames are already derived via the provided email. + # So, if they're not necessary, just ignore them. + # + # (we do still allow appservices to set them below) desired_username = None - if "username" in body: - if not isinstance(body["username"], str) or len(body["username"]) > 512: - raise SynapseError(400, "Invalid username") - desired_username = body["username"] + + desired_display_name = body.get("display_name") + + # We need to retrieve the password early in order to pass it to + # application service registration + # This is specific to shadow server registration of users via an AS + password = body.pop("password", None) # fork off as soon as possible for ASes which have completely # different registration flows to normal users @@ -480,7 +486,7 @@ class RegisterRestServlet(RestServlet): # Set the desired user according to the AS API (which uses the # 'user' key not 'username'). Since this is a new addition, we'll # fallback to 'username' if they gave one. - desired_username = body.get("user", desired_username) + desired_username = body.get("user", body.get("username")) # XXX we should check that desired_username is valid. Currently # we give appservices carte blanche for any insanity in mxids, @@ -510,16 +516,6 @@ class RegisterRestServlet(RestServlet): if not self._registration_enabled: raise SynapseError(403, "Registration has been disabled", Codes.FORBIDDEN) - # For regular registration, convert the provided username to lowercase - # before attempting to register it. This should mean that people who try - # to register with upper-case in their usernames don't get a nasty surprise. - # - # Note that we treat usernames case-insensitively in login, so they are - # free to carry on imagining that their username is CrAzYh4cKeR if that - # keeps them happy. 
- if desired_username is not None: - desired_username = desired_username.lower() - # Check if this account is upgrading from a guest account. guest_access_token = body.get("guest_access_token", None) @@ -528,7 +524,6 @@ class RegisterRestServlet(RestServlet): # Note that we remove the password from the body since the auth layer # will store the body in the session and we don't want a plaintext # password store there. - password = body.pop("password", None) if password is not None: if not isinstance(password, str) or len(password) > 512: raise SynapseError(400, "Invalid password") @@ -558,14 +553,6 @@ class RegisterRestServlet(RestServlet): session_id, UIAuthSessionDataConstants.PASSWORD_HASH, None ) - # Ensure that the username is valid. - if desired_username is not None: - await self.registration_handler.check_username( - desired_username, - guest_access_token=guest_access_token, - assigned_user_id=registered_user_id, - ) - # Check if the user-interactive authentication flows are complete, if # not this will raise a user-interactive auth error. try: @@ -605,7 +592,7 @@ class RegisterRestServlet(RestServlet): medium = auth_result[login_type]["medium"] address = auth_result[login_type]["address"] - if not check_3pid_allowed(self.hs, medium, address): + if not (await check_3pid_allowed(self.hs, medium, address)): raise SynapseError( 403, "Third party identifiers (email/phone numbers)" @@ -613,6 +600,80 @@ class RegisterRestServlet(RestServlet): Codes.THREEPID_DENIED, ) + existingUid = await self.store.get_user_id_by_threepid( + medium, address + ) + + if existingUid is not None: + raise SynapseError( + 400, "%s is already in use" % medium, Codes.THREEPID_IN_USE + ) + + if self.hs.config.registration.register_mxid_from_3pid: + # override the desired_username based on the 3PID if any. + # reset it first to avoid folks picking their own username. 
+ desired_username = None + + # we should have an auth_result at this point if we're going to progress + # to register the user (i.e. we haven't picked up a registered_user_id + # from our session store), in which case get ready and gen the + # desired_username + if auth_result: + if ( + self.hs.config.registration.register_mxid_from_3pid == "email" + and LoginType.EMAIL_IDENTITY in auth_result + ): + address = auth_result[LoginType.EMAIL_IDENTITY]["address"] + desired_username = synapse.types.strip_invalid_mxid_characters( + address.replace("@", "-").lower() + ) + + # find a unique mxid for the account, suffixing numbers + # if needed + while True: + try: + await self.registration_handler.check_username( + desired_username, + guest_access_token=guest_access_token, + assigned_user_id=registered_user_id, + ) + # if we got this far we passed the check. + break + except SynapseError as e: + if e.errcode == Codes.USER_IN_USE: + m = re.match(r"^(.*?)(\d+)$", desired_username) + if m: + desired_username = m.group(1) + str( + int(m.group(2)) + 1 + ) + else: + desired_username += "1" + else: + # something else went wrong. 
+ break + + if self.hs.config.register_just_use_email_for_display_name: + desired_display_name = address + else: + # Custom mapping between email address and display name + desired_display_name = _map_email_to_displayname(address) + elif ( + self.hs.config.registration.register_mxid_from_3pid == "msisdn" + and LoginType.MSISDN in auth_result + ): + desired_username = auth_result[LoginType.MSISDN]["address"] + else: + raise SynapseError( + 400, "Cannot derive mxid from 3pid; no recognised 3pid" + ) + + if desired_username is not None: + await self.registration_handler.check_username( + desired_username, + guest_access_token=guest_access_token, + assigned_user_id=registered_user_id, + ) + if registered_user_id is not None: logger.info( "Already registered user ID %r for this session", registered_user_id @@ -627,7 +688,12 @@ class RegisterRestServlet(RestServlet): if not password_hash: raise SynapseError(400, "Missing params: password", Codes.MISSING_PARAM) - desired_username = params.get("username", None) + if not self.hs.config.registration.register_mxid_from_3pid: + desired_username = params.get("username", None) + else: + # we keep the original desired_username derived from the 3pid above + pass + guest_access_token = params.get("guest_access_token", None) if desired_username is not None: @@ -676,6 +742,7 @@ class RegisterRestServlet(RestServlet): localpart=desired_username, password_hash=password_hash, guest_access_token=guest_access_token, + default_display_name=desired_display_name, threepid=threepid, address=client_addr, user_agent_ips=entries, @@ -838,6 +905,60 @@ class RegisterRestServlet(RestServlet): return 200, result +def cap(name: str) -> str: + """Capitalise parts of a name containing different words, including those + separated by hyphens. + For example, 'John-Doe' + + Args: + The name to parse + """ + if not name: + return name + + # Split the name by whitespace then hyphens, capitalizing each part then + # joining it back together. 
+ capatilized_name = " ".join( + "-".join(part.capitalize() for part in space_part.split("-")) + for space_part in name.split() + ) + return capatilized_name + + +def _map_email_to_displayname(address: str) -> str: + """Custom mapping from an email address to a user displayname + + Args: + address: The email address to process + Returns: + The new displayname + """ + # Split the part before and after the @ in the email. + # Replace all . with spaces in the first part + parts = address.replace(".", " ").split("@") + + # Figure out which org this email address belongs to + org_parts = parts[1].split(" ") + + # If this is a ...matrix.org email, mark them as an Admin + if org_parts[-2] == "matrix" and org_parts[-1] == "org": + org = "Tchap Admin" + + # Is this is a ...gouv.fr address, set the org to whatever is before + # gouv.fr. If there isn't anything (a @gouv.fr email) simply mark their + # org as "gouv" + elif org_parts[-2] == "gouv" and org_parts[-1] == "fr": + org = org_parts[-3] if len(org_parts) > 2 else org_parts[-2] + + # Otherwise, mark their org as the email's second-level domain name + else: + org = org_parts[-2] + + desired_display_name = cap(parts[0]) + " [" + cap(org) + "]" + + return desired_display_name + + def _calculate_registration_flows( config: HomeServerConfig, auth_handler: AuthHandler ) -> List[List[str]]: diff --git a/synapse/rest/client/room.py b/synapse/rest/client/room.py index 955d4e8641..418e92af32 100644 --- a/synapse/rest/client/room.py +++ b/synapse/rest/client/room.py @@ -801,7 +801,7 @@ class RoomMembershipRestServlet(TransactionRestServlet): content["id_server"], requester, txn_id, - content.get("id_access_token"), + id_access_token=content.get("id_access_token"), ) except ShadowBanError: # Pretend the request succeeded. 
diff --git a/synapse/rest/client/user_directory.py b/synapse/rest/client/user_directory.py index a47d9bd01d..8f1b234046 100644 --- a/synapse/rest/client/user_directory.py +++ b/synapse/rest/client/user_directory.py @@ -13,13 +13,19 @@ # limitations under the License. import logging -from typing import TYPE_CHECKING, Tuple +from typing import TYPE_CHECKING, Dict, Tuple -from synapse.api.errors import SynapseError +from signedjson.sign import sign_json + +from synapse.api.errors import Codes, SynapseError from synapse.http.server import HttpServer -from synapse.http.servlet import RestServlet, parse_json_object_from_request +from synapse.http.servlet import ( + RestServlet, + assert_params_in_dict, + parse_json_object_from_request, +) from synapse.http.site import SynapseRequest -from synapse.types import JsonDict +from synapse.types import JsonDict, UserID from ._base import client_patterns @@ -37,6 +43,7 @@ class UserDirectorySearchRestServlet(RestServlet): self.hs = hs self.auth = hs.get_auth() self.user_directory_handler = hs.get_user_directory_handler() + self.http_client = hs.get_simple_http_client() async def on_POST(self, request: SynapseRequest) -> Tuple[int, JsonDict]: """Searches for users in directory @@ -63,6 +70,16 @@ class UserDirectorySearchRestServlet(RestServlet): body = parse_json_object_from_request(request) + if self.hs.config.userdirectory.user_directory_defer_to_id_server: + signed_body = sign_json( + body, self.hs.hostname, self.hs.config.signing_key[0] + ) + url = "%s/_matrix/identity/api/v1/user_directory/search" % ( + self.hs.config.userdirectory.user_directory_defer_to_id_server, + ) + resp = await self.http_client.post_json_get_json(url, signed_body) + return 200, resp + limit = body.get("limit", 10) limit = min(limit, 50) @@ -78,5 +95,126 @@ class UserDirectorySearchRestServlet(RestServlet): return 200, results +class SingleUserInfoServlet(RestServlet): + """ + Deprecated and replaced by `/users/info` + + GET /user/{user_id}/info 
HTTP/1.1 + """ + + PATTERNS = client_patterns("/user/(?P<user_id>[^/]*)/info$") + + def __init__(self, hs: "HomeServer") -> None: + super(SingleUserInfoServlet, self).__init__() + self.hs = hs + self.auth = hs.get_auth() + self.store = hs.get_datastore() + self.transport_layer = hs.get_federation_transport_client() + registry = hs.get_federation_registry() + + if not registry.query_handlers.get("user_info"): + registry.register_query_handler("user_info", self._on_federation_query) + + async def on_GET( + self, request: SynapseRequest, user_id: str + ) -> Tuple[int, JsonDict]: + # Ensure the user is authenticated + await self.auth.get_user_by_req(request) + + user = UserID.from_string(user_id) + if not self.hs.is_mine(user): + # Attempt to make a federation request to the server that owns this user + args = {"user_id": user_id} + res = await self.transport_layer.make_query( + user.domain, "user_info", args, retry_on_dns_fail=True + ) + return 200, res + + user_id_to_info = await self.store.get_info_for_users([user_id]) + return 200, user_id_to_info[user_id] + + async def _on_federation_query(self, args: JsonDict) -> JsonDict: + """Called when a request for user information appears over federation + + Args: + args: Dictionary of query arguments provided by the request + + Returns: + Deactivation and expiration information for a given user + """ + user_id = args.get("user_id") + if not user_id: + raise SynapseError(400, "user_id not provided") + + user = UserID.from_string(user_id) + if not self.hs.is_mine(user): + raise SynapseError(400, "User is not hosted on this homeserver") + + user_ids_to_info_dict = await self.store.get_info_for_users([user_id]) + return user_ids_to_info_dict[user_id] + + +class UserInfoServlet(RestServlet): + """Bulk version of `/user/{user_id}/info` endpoint + + GET /users/info HTTP/1.1 + + Returns a dictionary of user_id to info dictionary. 
Supports remote users + """ + + PATTERNS = client_patterns("/users/info$", unstable=True, releases=()) + + def __init__(self, hs: "HomeServer") -> None: + super(UserInfoServlet, self).__init__() + self.hs = hs + self.auth = hs.get_auth() + self.store = hs.get_datastore() + self.transport_layer = hs.get_federation_transport_client() + + async def on_POST(self, request: SynapseRequest) -> Tuple[int, JsonDict]: + # Ensure the user is authenticated + await self.auth.get_user_by_req(request) + + # Extract the user_ids from the request + body = parse_json_object_from_request(request) + assert_params_in_dict(body, required=["user_ids"]) + + user_ids = body["user_ids"] + if not isinstance(user_ids, list): + raise SynapseError( + 400, + "'user_ids' must be a list of user ID strings", + errcode=Codes.INVALID_PARAM, + ) + + # Separate local and remote users + local_user_ids = set() + remote_server_to_user_ids = {} # type: Dict[str, set] + for user_id in user_ids: + user = UserID.from_string(user_id) + + if self.hs.is_mine(user): + local_user_ids.add(user_id) + else: + remote_server_to_user_ids.setdefault(user.domain, set()) + remote_server_to_user_ids[user.domain].add(user_id) + + # Retrieve info of all local users + user_id_to_info_dict = await self.store.get_info_for_users(local_user_ids) + + # Request info of each remote user from their remote homeserver + for server_name, user_id_set in remote_server_to_user_ids.items(): + # Make a request to the given server about their own users + res = await self.transport_layer.get_info_of_users( + server_name, list(user_id_set) + ) + + user_id_to_info_dict.update(res) + + return 200, user_id_to_info_dict + + def register_servlets(hs: "HomeServer", http_server: HttpServer) -> None: UserDirectorySearchRestServlet(hs).register(http_server) + SingleUserInfoServlet(hs).register(http_server) + UserInfoServlet(hs).register(http_server) diff --git a/synapse/rest/client/versions.py b/synapse/rest/client/versions.py index 
8d888f4565..07f605fa64 100644 --- a/synapse/rest/client/versions.py +++ b/synapse/rest/client/versions.py @@ -80,9 +80,12 @@ class VersionsRestServlet(RestServlet): # MSC2326. "org.matrix.label_based_filtering": True, # Implements support for cross signing as described in MSC1756 - "org.matrix.e2e_cross_signing": True, + # "org.matrix.e2e_cross_signing": True, # Implements additional endpoints as described in MSC2432 "org.matrix.msc2432": True, + # Tchap does not currently assume this rule for r0.5.0 + # XXX: Remove this when it does + "m.lazy_load_members": True, # Implements additional endpoints as described in MSC2666 "uk.half-shot.msc2666": True, # Whether new rooms will be set to encrypted or not (based on presets). diff --git a/synapse/storage/databases/main/profile.py b/synapse/storage/databases/main/profile.py index e197b7203e..b89b330038 100644 --- a/synapse/storage/databases/main/profile.py +++ b/synapse/storage/databases/main/profile.py @@ -1,4 +1,5 @@ # Copyright 2014-2016 OpenMarket Ltd +# Copyright 2018 New Vector Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -11,12 +12,16 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
-from typing import Any, Dict, List, Optional +from typing import Any, Dict, List, Optional, Tuple from synapse.api.errors import StoreError from synapse.storage._base import SQLBaseStore from synapse.storage.database import LoggingTransaction from synapse.storage.databases.main.roommember import ProfileInfo +from synapse.types import UserID +from synapse.util.caches.descriptors import cached + +BATCH_SIZE = 100 class ProfileWorkerStore(SQLBaseStore): @@ -39,6 +44,7 @@ class ProfileWorkerStore(SQLBaseStore): avatar_url=profile["avatar_url"], display_name=profile["displayname"] ) + @cached(max_entries=5000) async def get_profile_displayname(self, user_localpart: str) -> Optional[str]: return await self.db_pool.simple_select_one_onecol( table="profiles", @@ -47,6 +53,7 @@ class ProfileWorkerStore(SQLBaseStore): desc="get_profile_displayname", ) + @cached(max_entries=5000) async def get_profile_avatar_url(self, user_localpart: str) -> Optional[str]: return await self.db_pool.simple_select_one_onecol( table="profiles", @@ -55,6 +62,58 @@ class ProfileWorkerStore(SQLBaseStore): desc="get_profile_avatar_url", ) + async def get_latest_profile_replication_batch_number(self): + def f(txn): + txn.execute("SELECT MAX(batch) as maxbatch FROM profiles") + rows = self.db_pool.cursor_to_dict(txn) + return rows[0]["maxbatch"] + + return await self.db_pool.runInteraction( + "get_latest_profile_replication_batch_number", f + ) + + async def get_profile_batch(self, batchnum): + return await self.db_pool.simple_select_list( + table="profiles", + keyvalues={"batch": batchnum}, + retcols=("user_id", "displayname", "avatar_url", "active"), + desc="get_profile_batch", + ) + + async def assign_profile_batch(self): + def f(txn): + sql = ( + "UPDATE profiles SET batch = " + "(SELECT COALESCE(MAX(batch), -1) + 1 FROM profiles) " + "WHERE user_id in (" + " SELECT user_id FROM profiles WHERE batch is NULL limit ?" 
+ ")" + ) + txn.execute(sql, (BATCH_SIZE,)) + return txn.rowcount + + return await self.db_pool.runInteraction("assign_profile_batch", f) + + async def get_replication_hosts(self): + def f(txn): + txn.execute( + "SELECT host, last_synced_batch FROM profile_replication_status" + ) + rows = self.db_pool.cursor_to_dict(txn) + return {r["host"]: r["last_synced_batch"] for r in rows} + + return await self.db_pool.runInteraction("get_replication_hosts", f) + + async def update_replication_batch_for_host( + self, host: str, last_synced_batch: int + ): + return await self.db_pool.simple_upsert( + table="profile_replication_status", + keyvalues={"host": host}, + values={"last_synced_batch": last_synced_batch}, + desc="update_replication_batch_for_host", + ) + async def get_from_remote_profile_cache( self, user_id: str ) -> Optional[Dict[str, Any]]: @@ -72,32 +131,99 @@ class ProfileWorkerStore(SQLBaseStore): ) async def set_profile_displayname( - self, user_localpart: str, new_displayname: Optional[str] + self, user_localpart: str, new_displayname: Optional[str], batchnum: int ) -> None: + # Invalidate the read cache for this user + self.get_profile_displayname.invalidate((user_localpart,)) + await self.db_pool.simple_upsert( table="profiles", keyvalues={"user_id": user_localpart}, - values={"displayname": new_displayname}, + values={"displayname": new_displayname, "batch": batchnum}, desc="set_profile_displayname", + lock=False, # we can do this because user_id has a unique index ) async def set_profile_avatar_url( - self, user_localpart: str, new_avatar_url: Optional[str] + self, user_localpart: str, new_avatar_url: Optional[str], batchnum: int ) -> None: + # Invalidate the read cache for this user + self.get_profile_avatar_url.invalidate((user_localpart,)) + await self.db_pool.simple_upsert( table="profiles", keyvalues={"user_id": user_localpart}, - values={"avatar_url": new_avatar_url}, + values={"avatar_url": new_avatar_url, "batch": batchnum}, 
desc="set_profile_avatar_url", + lock=False, # we can do this because user_id has a unique index + ) + + async def set_profiles_active( + self, + users: List[UserID], + active: bool, + hide: bool, + batchnum: int, + ) -> None: + """Given a set of users, set active and hidden flags on them. + + Args: + users: A list of UserIDs + active: Whether to set the users to active or inactive + hide: Whether to hide the users (withold from replication). If + False and active is False, users will have their profiles + erased + batchnum: The batch number, used for profile replication + """ + # Convert list of localparts to list of tuples containing localparts + user_localparts = [(user.localpart,) for user in users] + + # Generate list of value tuples for each user + value_names = ("active", "batch") + values = [(int(active), batchnum) for _ in user_localparts] # type: List[Tuple] + + if not active and not hide: + # we are deactivating for real (not in hide mode) + # so clear the profile information + value_names += ("avatar_url", "displayname") + values = [v + (None, None) for v in values] + + return await self.db_pool.runInteraction( + "set_profiles_active", + self.db_pool.simple_upsert_many_txn, + table="profiles", + key_names=("user_id",), + key_values=user_localparts, + value_names=value_names, + value_values=values, + ) + + async def add_remote_profile_cache( + self, user_id: str, displayname: str, avatar_url: str + ) -> None: + """Ensure we are caching the remote user's profiles. + + This should only be called when `is_subscribed_remote_profile_for_user` + would return true for the user. 
+ """ + await self.db_pool.simple_upsert( + table="remote_profile_cache", + keyvalues={"user_id": user_id}, + values={ + "displayname": displayname, + "avatar_url": avatar_url, + "last_check": self._clock.time_msec(), + }, + desc="add_remote_profile_cache", ) async def update_remote_profile_cache( self, user_id: str, displayname: Optional[str], avatar_url: Optional[str] ) -> int: - return await self.db_pool.simple_update( + return await self.db_pool.simple_upsert( table="remote_profile_cache", keyvalues={"user_id": user_id}, - updatevalues={ + values={ "displayname": displayname, "avatar_url": avatar_url, "last_check": self._clock.time_msec(), @@ -167,6 +293,17 @@ class ProfileWorkerStore(SQLBaseStore): class ProfileStore(ProfileWorkerStore): + def __init__(self, database, db_conn, hs): + super().__init__(database, db_conn, hs) + + self.db_pool.updates.register_background_index_update( + "profile_replication_status_host_index", + index_name="profile_replication_status_idx", + table="profile_replication_status", + columns=["host"], + unique=True, + ) + async def add_remote_profile_cache( self, user_id: str, displayname: str, avatar_url: str ) -> None: diff --git a/synapse/storage/databases/main/registration.py b/synapse/storage/databases/main/registration.py index 0e8c168667..60996c78e9 100644 --- a/synapse/storage/databases/main/registration.py +++ b/synapse/storage/databases/main/registration.py @@ -16,7 +16,7 @@ import logging import random import re -from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple, Union +from typing import TYPE_CHECKING, Any, Dict, Iterable, List, Optional, Tuple, Union import attr @@ -311,6 +311,37 @@ class RegistrationWorkerStore(CacheInvalidationWorkerStore): "set_account_validity_for_user", set_account_validity_for_user_txn ) + async def get_expired_users(self): + """Get UserIDs of all expired users. + + Users who are not active, or do not have profile information, are + excluded from the results. 
+ + Returns: + Deferred[List[UserID]]: List of expired user IDs + """ + + def get_expired_users_txn(txn, now_ms): + # We need to use pattern matching as profiles.user_id is confusingly just the + # user's localpart, whereas account_validity.user_id is a full user ID + sql = """ + SELECT av.user_id from account_validity AS av + LEFT JOIN profiles as p + ON av.user_id LIKE '%%' || p.user_id || ':%%' + WHERE expiration_ts_ms <= ? + AND p.active = 1 + """ + txn.execute(sql, (now_ms,)) + rows = txn.fetchall() + + return [UserID.from_string(row[0]) for row in rows] + + res = await self.db_pool.runInteraction( + "get_expired_users", get_expired_users_txn, self._clock.time_msec() + ) + + return res + async def set_renewal_token_for_user( self, user_id: str, renewal_token: str ) -> None: @@ -431,6 +462,55 @@ class RegistrationWorkerStore(CacheInvalidationWorkerStore): desc="delete_account_validity_for_user", ) + async def get_info_for_users( + self, + user_ids: Iterable[str], + ): + """Return the user info for a given set of users + + Args: + user_ids: A list of users to return information about + + Returns: + Deferred[Dict[str, bool]]: A dictionary mapping each user ID to + a dict with the following keys: + * expired - whether this is an expired user + * deactivated - whether this is a deactivated user + """ + # Get information of all our local users + def _get_info_for_users_txn(txn): + rows = [] + + for user_id in user_ids: + sql = """ + SELECT u.name, u.deactivated, av.expiration_ts_ms + FROM users as u + LEFT JOIN account_validity as av + ON av.user_id = u.name + WHERE u.name = ? 
+ """ + + txn.execute(sql, (user_id,)) + row = txn.fetchone() + if row: + rows.append(row) + + return rows + + info_rows = await self.db_pool.runInteraction( + "get_info_for_users", _get_info_for_users_txn + ) + + return { + user_id: { + "expired": ( + expiration is not None and self._clock.time_msec() >= expiration + ), + "deactivated": deactivated == 1, + } + for user_id, deactivated, expiration in info_rows + } + async def is_server_admin(self, user: UserID) -> bool: """Determines if a user is an admin of this homeserver. diff --git a/synapse/storage/databases/main/room.py b/synapse/storage/databases/main/room.py index 7d694d852d..d90e8f4f25 100644 --- a/synapse/storage/databases/main/room.py +++ b/synapse/storage/databases/main/room.py @@ -12,7 +12,6 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. - import collections import logging from abc import abstractmethod @@ -681,6 +680,11 @@ class RoomWorkerStore(SQLBaseStore): Returns: dict[int, int]: "min_lifetime" and "max_lifetime" for this room. """ + # If the room retention feature is disabled, return a policy with no minimum nor + # maximum, in order not to filter out events we should filter out when sending to + # the client. + if not self.config.retention.retention_enabled: + return {"min_lifetime": None, "max_lifetime": None} def get_retention_policy_for_room_txn(txn): txn.execute( diff --git a/synapse/storage/schema/main/delta/48/profiles_batch.sql b/synapse/storage/schema/main/delta/48/profiles_batch.sql new file mode 100644 index 0000000000..e744c02fe8 --- /dev/null +++ b/synapse/storage/schema/main/delta/48/profiles_batch.sql @@ -0,0 +1,36 @@ +/* Copyright 2018 New Vector Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/* + * Add a batch number to track changes to profiles and the + * order they're made in so we can replicate user profiles + * to other hosts as they change + */ +ALTER TABLE profiles ADD COLUMN batch BIGINT DEFAULT NULL; + +/* + * Index on the batch number so we can get profiles + * by their batch + */ +CREATE INDEX profiles_batch_idx ON profiles(batch); + +/* + * A table to track what batch of user profiles has been + * synced to what profile replication target. + */ +CREATE TABLE profile_replication_status ( + host TEXT NOT NULL, + last_synced_batch BIGINT NOT NULL +); diff --git a/synapse/storage/schema/main/delta/50/profiles_deactivated_users.sql b/synapse/storage/schema/main/delta/50/profiles_deactivated_users.sql new file mode 100644 index 0000000000..96051ac179 --- /dev/null +++ b/synapse/storage/schema/main/delta/50/profiles_deactivated_users.sql @@ -0,0 +1,23 @@ +/* Copyright 2018 New Vector Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +/* + * A flag saying whether the user owning the profile has been deactivated + * This really belongs on the users table, not here, but the users table + * stores users by their full user_id and profiles stores them by localpart, + * so we can't easily join between the two tables. Plus, the batch number + * realy ought to represent data in this table that has changed. + */ +ALTER TABLE profiles ADD COLUMN active SMALLINT DEFAULT 1 NOT NULL; \ No newline at end of file diff --git a/synapse/storage/schema/main/delta/55/profile_replication_status_index.sql b/synapse/storage/schema/main/delta/55/profile_replication_status_index.sql new file mode 100644 index 0000000000..7542ab8cbd --- /dev/null +++ b/synapse/storage/schema/main/delta/55/profile_replication_status_index.sql @@ -0,0 +1,16 @@ +/* Copyright 2019 New Vector Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +CREATE UNIQUE INDEX profile_replication_status_idx ON profile_replication_status(host); \ No newline at end of file diff --git a/synapse/storage/schema/main/delta/59/12account_validity_token_used_ts_ms.sql b/synapse/storage/schema/main/delta/58/19account_validity_token_used_ts_ms.sql index 4836dac16e..4836dac16e 100644 --- a/synapse/storage/schema/main/delta/59/12account_validity_token_used_ts_ms.sql +++ b/synapse/storage/schema/main/delta/58/19account_validity_token_used_ts_ms.sql diff --git a/synapse/storage/schema/main/delta/59/11add_knock_members_to_stats.sql b/synapse/storage/schema/main/delta/58/24add_knock_members_to_stats.sql index 8eb2196f6a..658f55a384 100644 --- a/synapse/storage/schema/main/delta/59/11add_knock_members_to_stats.sql +++ b/synapse/storage/schema/main/delta/58/24add_knock_members_to_stats.sql @@ -13,8 +13,5 @@ * limitations under the License. */ --- Existing rows will default to NULL, so anything reading from these tables --- needs to interpret NULL as 0. This is fine here as no existing rooms can have --- any knocked members. 
-ALTER TABLE room_stats_current ADD COLUMN knocked_members INT; -ALTER TABLE room_stats_historical ADD COLUMN knocked_members BIGINT; +ALTER TABLE room_stats_current ADD knocked_members INT NOT NULL DEFAULT '0'; +ALTER TABLE room_stats_historical ADD knocked_members BIGINT NOT NULL DEFAULT '0'; diff --git a/synapse/storage/schema/main/full_schemas/54/full.sql.postgres b/synapse/storage/schema/main/full_schemas/54/full.sql.postgres index 889a9a0ce4..20c5af2eb7 100644 --- a/synapse/storage/schema/main/full_schemas/54/full.sql.postgres +++ b/synapse/storage/schema/main/full_schemas/54/full.sql.postgres @@ -658,10 +658,19 @@ CREATE TABLE presence_stream ( +CREATE TABLE profile_replication_status ( + host text NOT NULL, + last_synced_batch bigint NOT NULL +); + + + CREATE TABLE profiles ( user_id text NOT NULL, displayname text, - avatar_url text + avatar_url text, + batch bigint, + active smallint DEFAULT 1 NOT NULL ); @@ -1788,6 +1797,10 @@ CREATE INDEX presence_stream_user_id ON presence_stream USING btree (user_id); +CREATE INDEX profiles_batch_idx ON profiles USING btree (batch); + + + CREATE INDEX public_room_index ON rooms USING btree (is_public); diff --git a/synapse/storage/schema/main/full_schemas/54/full.sql.sqlite b/synapse/storage/schema/main/full_schemas/54/full.sql.sqlite index 308124e531..301c566a70 100644 --- a/synapse/storage/schema/main/full_schemas/54/full.sql.sqlite +++ b/synapse/storage/schema/main/full_schemas/54/full.sql.sqlite @@ -6,7 +6,7 @@ CREATE TABLE presence_allow_inbound( observed_user_id TEXT NOT NULL, observer_us CREATE TABLE users( name TEXT, password_hash TEXT, creation_ts BIGINT, admin SMALLINT DEFAULT 0 NOT NULL, upgrade_ts BIGINT, is_guest SMALLINT DEFAULT 0 NOT NULL, appservice_id TEXT, consent_version TEXT, consent_server_notice_sent TEXT, user_type TEXT DEFAULT NULL, UNIQUE(name) ); CREATE TABLE access_tokens( id BIGINT PRIMARY KEY, user_id TEXT NOT NULL, device_id TEXT, token TEXT NOT NULL, last_used BIGINT, UNIQUE(token) ); 
CREATE TABLE user_ips ( user_id TEXT NOT NULL, access_token TEXT NOT NULL, device_id TEXT, ip TEXT NOT NULL, user_agent TEXT NOT NULL, last_seen BIGINT NOT NULL ); -CREATE TABLE profiles( user_id TEXT NOT NULL, displayname TEXT, avatar_url TEXT, UNIQUE(user_id) ); +CREATE TABLE profiles( user_id TEXT NOT NULL, displayname TEXT, avatar_url TEXT, batch BIGINT DEFAULT NULL, active SMALLINT DEFAULT 1 NOT NULL, UNIQUE(user_id) ); CREATE TABLE received_transactions( transaction_id TEXT, origin TEXT, ts BIGINT, response_code INTEGER, response_json bytea, has_been_referenced smallint default 0, UNIQUE (transaction_id, origin) ); CREATE TABLE destinations( destination TEXT PRIMARY KEY, retry_last_ts BIGINT, retry_interval INTEGER ); CREATE TABLE events( stream_ordering INTEGER PRIMARY KEY, topological_ordering BIGINT NOT NULL, event_id TEXT NOT NULL, type TEXT NOT NULL, room_id TEXT NOT NULL, content TEXT, unrecognized_keys TEXT, processed BOOL NOT NULL, outlier BOOL NOT NULL, depth BIGINT DEFAULT 0 NOT NULL, origin_server_ts BIGINT, received_ts BIGINT, sender TEXT, contains_url BOOLEAN, UNIQUE (event_id) ); @@ -192,6 +192,8 @@ CREATE INDEX group_users_u_idx ON group_users(user_id); CREATE INDEX group_invites_u_idx ON group_invites(user_id); CREATE UNIQUE INDEX group_rooms_g_idx ON group_rooms(group_id, room_id); CREATE INDEX group_rooms_r_idx ON group_rooms(room_id); +CREATE INDEX profiles_batch_idx ON profiles(batch); +CREATE TABLE profile_replication_status ( host TEXT NOT NULL, last_synced_batch BIGINT NOT NULL ); CREATE TABLE user_daily_visits ( user_id TEXT NOT NULL, device_id TEXT, timestamp BIGINT NOT NULL ); CREATE INDEX user_daily_visits_uts_idx ON user_daily_visits(user_id, timestamp); CREATE INDEX user_daily_visits_ts_idx ON user_daily_visits(timestamp); diff --git a/synapse/test_module/__init__.py b/synapse/test_module/__init__.py new file mode 100644 index 0000000000..5dc06b9ce5 --- /dev/null +++ b/synapse/test_module/__init__.py @@ -0,0 +1,29 @@ +import time 
+ +from synapse.api.constants import EventTypes +from synapse.events import EventBase +from synapse.module_api import ModuleApi +from synapse.types import StateMap + + +class MySuperModule: + def __init__(self, config: dict, api: ModuleApi): + self.api = api + + self.api.register_third_party_rules_callbacks( + check_event_allowed=self.check_event_allowed, + ) + + async def check_event_allowed(self, event: EventBase, state: StateMap[EventBase]): + if event.is_state() and event.type == EventTypes.Member: + await self.api.create_and_send_event_into_room( + { + "room_id": event.room_id, + "sender": event.sender, + "type": "bzh.abolivier.test3", + "content": {"now": int(time.time())}, + "state_key": "", + } + ) + + return True, None diff --git a/synapse/test_module/test_account_validity/__init__.py b/synapse/test_module/test_account_validity/__init__.py new file mode 100644 index 0000000000..0b229e4aba --- /dev/null +++ b/synapse/test_module/test_account_validity/__init__.py @@ -0,0 +1,11 @@ +from typing import Optional + +from synapse.module_api import ModuleApi + + +class DummyAccountValidity: + def __init__(self, config: dict, api: ModuleApi): + api.register_account_validity_callbacks(is_user_expired=self.is_user_expired) + + async def is_user_expired(self, user_id: str) -> Optional[bool]: + return False diff --git a/synapse/types.py b/synapse/types.py index fb72f19343..b3e14c13b1 100644 --- a/synapse/types.py +++ b/synapse/types.py @@ -33,6 +33,7 @@ from typing import ( import attr from frozendict import frozendict from signedjson.key import decode_verify_key_bytes +from six.moves import filter from unpaddedbase64 import decode_base64 from zope.interface import Interface @@ -346,6 +347,19 @@ def contains_invalid_mxid_characters(localpart: str) -> bool: return any(c not in mxid_localpart_allowed_characters for c in localpart) +def strip_invalid_mxid_characters(localpart): + """Removes any invalid characters from an mxid + + Args: + localpart (basestring): the 
localpart to be stripped + + Returns: + localpart (basestring): the localpart having been stripped + """ + filtered = filter(lambda c: c in mxid_localpart_allowed_characters, localpart) + return "".join(filtered) + + UPPER_CASE_PATTERN = re.compile(b"[A-Z_]") # the following is a pattern which matches '=', and bytes which are not allowed in a mxid diff --git a/synapse/util/stringutils.py b/synapse/util/stringutils.py index ea1032b4fc..d26d8bb31e 100644 --- a/synapse/util/stringutils.py +++ b/synapse/util/stringutils.py @@ -154,7 +154,6 @@ def valid_id_server_location(id_server: str) -> bool: Returns: True if valid, False otherwise. """ - components = id_server.split("/", 1) host = components[0] diff --git a/synapse/util/threepids.py b/synapse/util/threepids.py index 389adf00f6..c7f8437e2c 100644 --- a/synapse/util/threepids.py +++ b/synapse/util/threepids.py @@ -32,7 +32,7 @@ logger = logging.getLogger(__name__) MAX_EMAIL_ADDRESS_LENGTH = 500 -def check_3pid_allowed(hs: "HomeServer", medium: str, address: str) -> bool: +async def check_3pid_allowed(hs: "HomeServer", medium: str, address: str) -> bool: """Checks whether a given format of 3PID is allowed to be used on this HS Args: @@ -43,6 +43,32 @@ def check_3pid_allowed(hs: "HomeServer", medium: str, address: str) -> bool: Returns: bool: whether the 3PID medium/address is allowed to be added to this HS """ + if hs.config.registration.check_is_for_allowed_local_3pids: + data = await hs.get_simple_http_client().get_json( + "https://%s%s" + % ( + hs.config.registration.check_is_for_allowed_local_3pids, + "/_matrix/identity/api/v1/internal-info", + ), + {"medium": medium, "address": address}, + ) + + # Check for invalid response + if "hs" not in data and "shadow_hs" not in data: + return False + + # Check if this user is intended to register for this homeserver + if ( + data.get("hs") != hs.config.server.server_name + and data.get("shadow_hs") != hs.config.server.server_name + ): + return False + + if 
data.get("requires_invite", False) and not data.get("invited", False): + # Requires an invite but hasn't been invited + return False + + return True if hs.config.registration.allowed_local_3pids: for constraint in hs.config.registration.allowed_local_3pids: diff --git a/sytest-blacklist b/sytest-blacklist index 57e603a4a6..092859a38a 100644 --- a/sytest-blacklist +++ b/sytest-blacklist @@ -35,3 +35,29 @@ Peeked rooms only turn up in the sync for the device who peeked them # Validation needs to be added to Synapse: #10554 Rejects invalid device keys + +# Blacklisted due to https://github.com/matrix-org/synapse-dinsic/issues/43 +Inviting an AS-hosted user asks the AS server +Accesing an AS-hosted room alias asks the AS server +Events in rooms with AS-hosted room aliases are sent to AS server + +# flaky test +If remote user leaves room we no longer receive device updates + +# flaky test +Can re-join room if re-invited + +# flaky test +Forgotten room messages cannot be paginated + +# flaky test +Local device key changes get to remote servers + +# flaky test +Old leaves are present in gapped incremental syncs + +# flaky test on workers +Old members are included in gappy incr LL sync if they start speaking + +# flaky test on workers +Presence changes to UNAVAILABLE are reported to remote room members diff --git a/tests/handlers/test_directory.py b/tests/handlers/test_directory.py index 0ea4e753e2..68e6fd79ee 100644 --- a/tests/handlers/test_directory.py +++ b/tests/handlers/test_directory.py @@ -40,6 +40,8 @@ class DirectoryTestCase(unittest.HomeserverTestCase): self.mock_registry.register_query_handler = register_query_handler hs = self.setup_test_homeserver( + federation_http_client=None, + resource_for_federation=Mock(), federation_client=self.mock_federation, federation_registry=self.mock_registry, ) diff --git a/tests/handlers/test_identity.py b/tests/handlers/test_identity.py new file mode 100644 index 0000000000..6a0e784dd7 --- /dev/null +++ 
b/tests/handlers/test_identity.py @@ -0,0 +1,115 @@ +# -*- coding: utf-8 -*- +# Copyright 2019 New Vector Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from unittest.mock import Mock + +from twisted.internet import defer + +import synapse.rest.admin +from synapse.rest.client import account, login + +from tests import unittest + + +class ThreepidISRewrittenURLTestCase(unittest.HomeserverTestCase): + + servlets = [ + synapse.rest.admin.register_servlets_for_client_rest_resource, + login.register_servlets, + account.register_servlets, + ] + + def make_homeserver(self, reactor, clock): + self.address = "test@test" + self.is_server_name = "testis" + self.is_server_url = "https://testis" + self.rewritten_is_url = "https://int.testis" + + config = self.default_config() + config["trusted_third_party_id_servers"] = [self.is_server_name] + config["rewrite_identity_server_urls"] = { + self.is_server_url: self.rewritten_is_url + } + + mock_http_client = Mock(spec=["get_json", "post_json_get_json"]) + mock_http_client.get_json.side_effect = defer.succeed({}) + mock_http_client.post_json_get_json.return_value = defer.succeed( + {"address": self.address, "medium": "email"} + ) + + self.hs = self.setup_test_homeserver( + config=config, simple_http_client=mock_http_client + ) + + mock_blacklisting_http_client = Mock(spec=["get_json", "post_json_get_json"]) + mock_blacklisting_http_client.get_json.side_effect = defer.succeed({}) + 
 mock_blacklisting_http_client.post_json_get_json.return_value = defer.succeed( + {"address": self.address, "medium": "email"} + ) + + # TODO: This class does not use a singleton to get its http client + # This should be fixed for easier testing + # https://github.com/matrix-org/synapse-dinsic/issues/26 + self.hs.get_identity_handler().blacklisting_http_client = ( + mock_blacklisting_http_client + ) + + return self.hs + + def prepare(self, reactor, clock, hs): + self.user_id = self.register_user("kermit", "monkey") + + def test_rewritten_id_server(self): + """ + Tests that, when validating a 3PID association while rewriting the IS's server + name: + * the bind request is done against the rewritten hostname + * the original, non-rewritten, server name is stored in the database + """ + handler = self.hs.get_identity_handler() + post_json_get_json = handler.blacklisting_http_client.post_json_get_json + store = self.hs.get_datastore() + + creds = {"sid": "123", "client_secret": "some_secret"} + + # Make sure processing the mocked response goes through. + data = self.get_success( + handler.bind_threepid( + client_secret=creds["client_secret"], + sid=creds["sid"], + mxid=self.user_id, + id_server=self.is_server_name, + use_v2=False, + ) + ) + self.assertEqual(data.get("address"), self.address) + + # Check that the request was done against the rewritten server name. + post_json_get_json.assert_called_once_with( + "%s/_matrix/identity/api/v1/3pid/bind" % (self.rewritten_is_url,), + { + "sid": creds["sid"], + "client_secret": creds["client_secret"], + "mxid": self.user_id, + }, + headers={}, + ) + + # Check that the original server name is saved in the database instead of the + # rewritten one. 
+ id_servers = self.get_success( + store.get_id_servers_user_bound(self.user_id, "email", self.address) + ) + self.assertEqual(id_servers, [self.is_server_name]) diff --git a/tests/handlers/test_profile.py b/tests/handlers/test_profile.py index c153018fd8..42ff72a94a 100644 --- a/tests/handlers/test_profile.py +++ b/tests/handlers/test_profile.py @@ -59,7 +59,7 @@ class ProfileTestCase(unittest.HomeserverTestCase): def test_get_my_name(self): self.get_success( - self.store.set_profile_displayname(self.frank.localpart, "Frank") + self.store.set_profile_displayname(self.frank.localpart, "Frank", 1) ) displayname = self.get_success(self.handler.get_displayname(self.frank)) @@ -114,7 +114,7 @@ class ProfileTestCase(unittest.HomeserverTestCase): # Setting displayname for the first time is allowed self.get_success( - self.store.set_profile_displayname(self.frank.localpart, "Frank") + self.store.set_profile_displayname(self.frank.localpart, "Frank", 1) ) self.assertEquals( @@ -159,7 +159,7 @@ class ProfileTestCase(unittest.HomeserverTestCase): def test_incoming_fed_query(self): self.get_success(self.store.create_profile("caroline")) - self.get_success(self.store.set_profile_displayname("caroline", "Caroline")) + self.get_success(self.store.set_profile_displayname("caroline", "Caroline", 1)) response = self.get_success( self.query_handlers["profile"]( @@ -176,7 +176,7 @@ class ProfileTestCase(unittest.HomeserverTestCase): def test_get_my_avatar(self): self.get_success( self.store.set_profile_avatar_url( - self.frank.localpart, "http://my.server/me.png" + self.frank.localpart, "http://my.server/me.png", 1 ) ) avatar_url = self.get_success(self.handler.get_avatar_url(self.frank)) @@ -230,7 +230,7 @@ class ProfileTestCase(unittest.HomeserverTestCase): # Setting displayname for the first time is allowed self.get_success( self.store.set_profile_avatar_url( - self.frank.localpart, "http://my.server/me.png" + self.frank.localpart, "http://my.server/me.png", 1 ) ) diff --git 
a/tests/handlers/test_register.py b/tests/handlers/test_register.py index cd6f2c77ae..c010a1407b 100644 --- a/tests/handlers/test_register.py +++ b/tests/handlers/test_register.py @@ -23,9 +23,11 @@ from synapse.api.errors import ( SynapseError, ) from synapse.events.spamcheck import load_legacy_spam_checkers +from synapse.rest.client.register import _map_email_to_displayname, register_servlets from synapse.spam_checker_api import RegistrationBehaviour from synapse.types import RoomAlias, RoomID, UserID, create_requester +from tests.server import FakeChannel from tests.test_utils import make_awaitable from tests.unittest import override_config from tests.utils import mock_getRawHeaders @@ -121,6 +123,10 @@ class LegacyDenyAll(TestLegacyRegistrationSpamChecker): class RegistrationTestCase(unittest.HomeserverTestCase): """Tests the RegistrationHandler.""" + servlets = [ + register_servlets, + ] + def make_homeserver(self, reactor, clock): hs_config = self.default_config() @@ -685,6 +691,103 @@ class RegistrationTestCase(unittest.HomeserverTestCase): self.handler.register_user(localpart="bobflimflob", auth_provider_id="saml") ) + def test_email_to_displayname_mapping(self): + """Test that custom emails are mapped to new user displaynames correctly""" + self._check_mapping( + "jack-phillips.rivers@big-org.com", "Jack-Phillips Rivers [Big-Org]" + ) + + self._check_mapping("bob.jones@matrix.org", "Bob Jones [Tchap Admin]") + + self._check_mapping("bob-jones.blabla@gouv.fr", "Bob-Jones Blabla [Gouv]") + + # Multibyte unicode characters + self._check_mapping( + "j\u030a\u0065an-poppy.seed@example.com", + "J\u030a\u0065an-Poppy Seed [Example]", + ) + + def _check_mapping(self, i, expected): + result = _map_email_to_displayname(i) + self.assertEqual(result, expected) + + @override_config( + { + "bind_new_user_emails_to_sydent": "https://is.example.com", + "registrations_require_3pid": ["email"], + "account_threepid_delegates": {}, + "email": { + "smtp_host": "127.0.0.1", + 
"smtp_port": 20, + "require_transport_security": False, + "smtp_user": None, + "smtp_pass": None, + "notif_from": "test@example.com", + }, + "public_baseurl": "http://localhost", + } + ) + def test_user_email_bound_via_sydent_internal_api(self): + """Tests that emails are bound after registration if this option is set""" + # Register user with an email address + email = "alice@example.com" + + # Mock Synapse's threepid validator + get_threepid_validation_session = Mock( + return_value=make_awaitable( + {"medium": "email", "address": email, "validated_at": 0} + ) + ) + self.store.get_threepid_validation_session = get_threepid_validation_session + delete_threepid_session = Mock(return_value=make_awaitable(None)) + self.store.delete_threepid_session = delete_threepid_session + + # Mock Synapse's http json post method to check for the internal bind call + post_json_get_json = Mock(return_value=make_awaitable(None)) + self.hs.get_identity_handler().http_client.post_json_get_json = ( + post_json_get_json + ) + + # Retrieve a UIA session ID + channel = self.uia_register( + 401, {"username": "alice", "password": "nobodywillguessthis"} + ) + session_id = channel.json_body["session"] + + # Register our email address using the fake validation session above + channel = self.uia_register( + 200, + { + "username": "alice", + "password": "nobodywillguessthis", + "auth": { + "session": session_id, + "type": "m.login.email.identity", + "threepid_creds": {"sid": "blabla", "client_secret": "blablabla"}, + }, + }, + ) + self.assertEqual(channel.json_body["user_id"], "@alice:test") + + # Check that a bind attempt was made to our fake identity server + post_json_get_json.assert_called_with( + "https://is.example.com/_matrix/identity/internal/bind", + {"address": "alice@example.com", "medium": "email", "mxid": "@alice:test"}, + ) + + # Check that we stored a mapping of this bind + bound_threepids = self.get_success( + self.store.user_get_bound_threepids("@alice:test") + ) + 
self.assertListEqual(bound_threepids, [{"medium": "email", "address": email}]) + + def uia_register(self, expected_response: int, body: dict) -> FakeChannel: + """Make a register request.""" + channel = self.make_request("POST", "register", body) + + self.assertEqual(channel.code, expected_response) + return channel + async def get_or_create_user( self, requester, localpart, displayname, password_hash=None ): diff --git a/tests/handlers/test_stats.py b/tests/handlers/test_stats.py index 56207f4db6..216e0aa8bb 100644 --- a/tests/handlers/test_stats.py +++ b/tests/handlers/test_stats.py @@ -20,8 +20,14 @@ from tests import unittest # The expected number of state events in a fresh public room. EXPT_NUM_STATE_EVTS_IN_FRESH_PUBLIC_ROOM = 5 + # The expected number of state events in a fresh private room. -EXPT_NUM_STATE_EVTS_IN_FRESH_PRIVATE_ROOM = 6 +# +# Note: we increase this by 2 on the dinsic branch as we send +# a "im.vector.room.access_rules" state event into new private rooms, +# and an encryption state event as all private rooms are encrypted +# by default +EXPT_NUM_STATE_EVTS_IN_FRESH_PRIVATE_ROOM = 7 class StatsRoomTests(unittest.HomeserverTestCase): diff --git a/tests/handlers/test_user_directory.py b/tests/handlers/test_user_directory.py index 70c621b825..1460c974bf 100644 --- a/tests/handlers/test_user_directory.py +++ b/tests/handlers/test_user_directory.py @@ -22,10 +22,17 @@ import synapse.rest.admin from synapse.api.constants import UserTypes from synapse.api.room_versions import RoomVersion, RoomVersions from synapse.appservice import ApplicationService -from synapse.rest.client import login, register, room, user_directory +from synapse.rest.client import ( + account, + account_validity, + login, + register, + room, + user_directory, +) from synapse.server import HomeServer from synapse.storage.roommember import ProfileInfo -from synapse.types import create_requester +from synapse.types import JsonDict, create_requester from synapse.util import Clock 
from tests import unittest @@ -1053,3 +1060,130 @@ class TestUserDirSearchDisabled(unittest.HomeserverTestCase): ) self.assertEquals(200, channel.code, channel.result) self.assertTrue(len(channel.json_body["results"]) == 0) + + +class UserInfoTestCase(unittest.FederatingHomeserverTestCase): + servlets = [ + login.register_servlets, + synapse.rest.admin.register_servlets_for_client_rest_resource, + account_validity.register_servlets, + user_directory.register_servlets, + account.register_servlets, + ] + + def default_config(self) -> JsonDict: + config = super().default_config() + + # Set accounts to expire after a week + config["account_validity"] = { + "enabled": True, + "period": 604800000, # Time in ms for 1 week + } + return config + + def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None: + super(UserInfoTestCase, self).prepare(reactor, clock, hs) + self.store = hs.get_datastore() + self.handler = hs.get_user_directory_handler() + + def test_user_info(self) -> None: + """Test /users/info for local users from the Client-Server API""" + user_one, user_two, user_three, user_three_token = self.setup_test_users() + + # Request info about each user from user_three + channel = self.make_request( + "POST", + path="/_matrix/client/unstable/users/info", + content={"user_ids": [user_one, user_two, user_three]}, + access_token=user_three_token, + shorthand=False, + ) + self.assertEquals(200, channel.code, channel.result) + + # Check the state of user_one matches + user_one_info = channel.json_body[user_one] + self.assertTrue(user_one_info["deactivated"]) + self.assertFalse(user_one_info["expired"]) + + # Check the state of user_two matches + user_two_info = channel.json_body[user_two] + self.assertFalse(user_two_info["deactivated"]) + self.assertTrue(user_two_info["expired"]) + + # Check the state of user_three matches + user_three_info = channel.json_body[user_three] + self.assertFalse(user_three_info["deactivated"]) + 
 self.assertFalse(user_three_info["expired"]) + + def test_user_info_federation(self) -> None: + """Test that /users/info can be called from the Federation API, and + that we can query remote users from the Client-Server API + """ + user_one, user_two, user_three, user_three_token = self.setup_test_users() + + # Request information about our local users from the perspective of a remote server + channel = self.make_request( + "POST", + path="/_matrix/federation/unstable/users/info", + content={"user_ids": [user_one, user_two, user_three]}, + ) + self.assertEquals(200, channel.code) + + # Check the state of user_one matches + user_one_info = channel.json_body[user_one] + self.assertTrue(user_one_info["deactivated"]) + self.assertFalse(user_one_info["expired"]) + + # Check the state of user_two matches + user_two_info = channel.json_body[user_two] + self.assertFalse(user_two_info["deactivated"]) + self.assertTrue(user_two_info["expired"]) + + # Check the state of user_three matches + user_three_info = channel.json_body[user_three] + self.assertFalse(user_three_info["deactivated"]) + self.assertFalse(user_three_info["expired"]) + + def setup_test_users(self) -> Tuple[str, str, str, str]: + """Create an admin user and three test users, each with a different state""" + + # Create an admin user to expire other users with + self.register_user("admin", "adminpassword", admin=True) + admin_token = self.login("admin", "adminpassword") + + # Create three users + user_one = self.register_user("alice", "pass") + user_one_token = self.login("alice", "pass") + user_two = self.register_user("bob", "pass") + user_three = self.register_user("carl", "pass") + user_three_token = self.login("carl", "pass") + + # Deactivate user_one + self.deactivate(user_one, user_one_token) + + # Expire user_two + self.expire(user_two, admin_token) + + # Do nothing to user_three + + return user_one, user_two, user_three, user_three_token + + def expire(self, user_id_to_expire: str, admin_tok: str) 
-> None: + url = "/_synapse/admin/v1/account_validity/validity" + request_data = { + "user_id": user_id_to_expire, + "expiration_ts": 0, + "enable_renewal_emails": False, + } + channel = self.make_request("POST", url, request_data, access_token=admin_tok) + self.assertEquals(channel.result["code"], b"200", channel.result) + + def deactivate(self, user_id: str, tok: str) -> None: + request_data = { + "auth": {"type": "m.login.password", "user": user_id, "password": "pass"}, + "erase": False, + } + channel = self.make_request( + "POST", "account/deactivate", request_data, access_token=tok + ) + self.assertEqual(channel.code, 200) diff --git a/tests/module_api/test_api.py b/tests/module_api/test_api.py index d16cd141a7..4a17a3c27b 100644 --- a/tests/module_api/test_api.py +++ b/tests/module_api/test_api.py @@ -531,6 +531,29 @@ class ModuleApiTestCase(HomeserverTestCase): self.assertEqual(state[("org.matrix.test", "")].state_key, "") self.assertEqual(state[("org.matrix.test", "")].content, {}) + def test_get_room_state(self): + """Tests that a module can retrieve the state of a room through the module API.""" + user_id = self.register_user("peter", "hackme") + tok = self.login("peter", "hackme") + + # Create a room and send some custom state in it. + room_id = self.helper.create_room_as(tok=tok) + self.helper.send_state(room_id, "org.matrix.test", {}, tok=tok) + + # Check that the module API can successfully fetch state for the room. + state = self.get_success( + defer.ensureDeferred(self.module_api.get_room_state(room_id)) + ) + + # Check that a few standard events are in the returned state. + self.assertIn((EventTypes.Create, ""), state) + self.assertIn((EventTypes.Member, user_id), state) + + # Check that our custom state event is in the returned state. 
+ self.assertEqual(state[("org.matrix.test", "")].sender, user_id) + self.assertEqual(state[("org.matrix.test", "")].state_key, "") + self.assertEqual(state[("org.matrix.test", "")].content, {}) + class ModuleApiWorkerTestCase(BaseMultiWorkerStreamTestCase): """For testing ModuleApi functionality in a multi-worker setup""" diff --git a/tests/push/test_http.py b/tests/push/test_http.py index c068d329a9..402eceb6a3 100644 --- a/tests/push/test_http.py +++ b/tests/push/test_http.py @@ -399,8 +399,8 @@ class HTTPPusherTests(HomeserverTestCase): self.push_attempts[1][1], "http://example.com/_matrix/push/v1/notify" ) - # check that this is low-priority - self.assertEqual(self.push_attempts[1][2]["notification"]["prio"], "low") + # check that this is high-priority + self.assertEqual(self.push_attempts[1][2]["notification"]["prio"], "high") def test_sends_high_priority_for_mention(self): """ @@ -475,8 +475,8 @@ class HTTPPusherTests(HomeserverTestCase): self.push_attempts[1][1], "http://example.com/_matrix/push/v1/notify" ) - # check that this is low-priority - self.assertEqual(self.push_attempts[1][2]["notification"]["prio"], "low") + # check that this is high-priority + self.assertEqual(self.push_attempts[1][2]["notification"]["prio"], "high") def test_sends_high_priority_for_atroom(self): """ @@ -558,8 +558,8 @@ class HTTPPusherTests(HomeserverTestCase): self.push_attempts[1][1], "http://example.com/_matrix/push/v1/notify" ) - # check that this is low-priority - self.assertEqual(self.push_attempts[1][2]["notification"]["prio"], "low") + # check that this is high-priority + self.assertEqual(self.push_attempts[1][2]["notification"]["prio"], "high") def test_push_unread_count_group_by_room(self): """ diff --git a/tests/rest/admin/test_user.py b/tests/rest/admin/test_user.py index 5011e54563..6a641756e9 100644 --- a/tests/rest/admin/test_user.py +++ b/tests/rest/admin/test_user.py @@ -781,9 +781,9 @@ class UsersListTestCase(unittest.HomeserverTestCase): # Set avatar URL to 
all users, that no user has a NULL value to avoid # different sort order between SQlite and PostreSQL - self.get_success(self.store.set_profile_avatar_url("user1", "mxc://url3")) - self.get_success(self.store.set_profile_avatar_url("user2", "mxc://url2")) - self.get_success(self.store.set_profile_avatar_url("admin", "mxc://url1")) + self.get_success(self.store.set_profile_avatar_url("user1", "mxc://url3", 1)) + self.get_success(self.store.set_profile_avatar_url("user2", "mxc://url2", 1)) + self.get_success(self.store.set_profile_avatar_url("admin", "mxc://url1", 1)) # order by default (name) self._order_test([self.admin_user, user1, user2], None) @@ -924,7 +924,7 @@ class DeactivateAccountTestCase(unittest.HomeserverTestCase): # set attributes for user self.get_success( - self.store.set_profile_avatar_url("user", "mxc://servername/mediaid") + self.store.set_profile_avatar_url("user", "mxc://servername/mediaid", 1) ) self.get_success( self.store.user_add_threepid("@user:test", "email", "foo@bar.com", 0, 0) @@ -1085,8 +1085,13 @@ class DeactivateAccountTestCase(unittest.HomeserverTestCase): self.assertEqual("@user:test", channel.json_body["name"]) self.assertEqual(True, channel.json_body["deactivated"]) self.assertEqual(0, len(channel.json_body["threepids"])) - self.assertEqual("mxc://servername/mediaid", channel.json_body["avatar_url"]) - self.assertEqual("User1", channel.json_body["displayname"]) + + # On DINUM's deployment we clear the profile information during a deactivation regardless, + # whereas on mainline we decided to only do this if the deactivation was performed with erase: True. + # The discrepancy is due to profile replication. 
+ # See synapse.storage.databases.main.profile.ProfileWorkerStore.set_profiles_active + self.assertIsNone(channel.json_body["avatar_url"]) + self.assertIsNone(channel.json_body["displayname"]) self._is_erased("@user:test", False) @@ -2052,7 +2057,7 @@ class UserRestTestCase(unittest.HomeserverTestCase): # set attributes for user self.get_success( - self.store.set_profile_avatar_url("user", "mxc://servername/mediaid") + self.store.set_profile_avatar_url("user", "mxc://servername/mediaid", 1) ) self.get_success( self.store.user_add_threepid("@user:test", "email", "foo@bar.com", 0, 0) @@ -2069,6 +2074,11 @@ class UserRestTestCase(unittest.HomeserverTestCase): self.assertEqual("@user:test", channel.json_body["name"]) self.assertFalse(channel.json_body["deactivated"]) self.assertEqual("foo@bar.com", channel.json_body["threepids"][0]["address"]) + + # On DINUM's deployment we clear the profile information during a deactivation regardless, + # whereas on mainline we decided to only do this if the deactivation was performed with erase: True. + # The discrepancy is due to profile replication. + # See synapse.storage.databases.main.profile.ProfileWorkerStore.set_profiles_active self.assertEqual("mxc://servername/mediaid", channel.json_body["avatar_url"]) self.assertEqual("User", channel.json_body["displayname"]) @@ -2085,8 +2095,13 @@ class UserRestTestCase(unittest.HomeserverTestCase): self.assertTrue(channel.json_body["deactivated"]) self.assertIsNone(channel.json_body["password_hash"]) self.assertEqual(0, len(channel.json_body["threepids"])) - self.assertEqual("mxc://servername/mediaid", channel.json_body["avatar_url"]) - self.assertEqual("User", channel.json_body["displayname"]) + + # On DINUM's deployment we clear the profile information during a deactivation regardless, + # whereas on mainline we decided to only do this if the deactivation was performed with erase: True. + # The discrepancy is due to profile replication. 
+ # See synapse.storage.databases.main.profile.ProfileWorkerStore.set_profiles_active + self.assertIsNone(channel.json_body["avatar_url"]) + self.assertIsNone(channel.json_body["displayname"]) # the user is deactivated, the threepid will be deleted # Get user @@ -2101,8 +2116,8 @@ class UserRestTestCase(unittest.HomeserverTestCase): self.assertTrue(channel.json_body["deactivated"]) self.assertIsNone(channel.json_body["password_hash"]) self.assertEqual(0, len(channel.json_body["threepids"])) - self.assertEqual("mxc://servername/mediaid", channel.json_body["avatar_url"]) - self.assertEqual("User", channel.json_body["displayname"]) + self.assertIsNone(channel.json_body["avatar_url"]) + self.assertIsNone(channel.json_body["displayname"]) @override_config({"user_directory": {"enabled": True, "search_all_users": True}}) def test_change_name_deactivate_user_user_directory(self): diff --git a/tests/rest/client/test_identity.py b/tests/rest/client/test_identity.py index becb4e8dcc..e0735ef4a3 100644 --- a/tests/rest/client/test_identity.py +++ b/tests/rest/client/test_identity.py @@ -12,17 +12,21 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-import json +from unittest.mock import Mock + +from twisted.internet import defer import synapse.rest.admin -from synapse.rest.client import login, room +from synapse.rest.client import account, login, room from tests import unittest -class IdentityTestCase(unittest.HomeserverTestCase): +class IdentityDisabledTestCase(unittest.HomeserverTestCase): + """Tests that 3PID lookup attempts fail when the HS's config disallows them.""" servlets = [ + account.register_servlets, synapse.rest.admin.register_servlets_for_client_rest_resource, room.register_servlets, login.register_servlets, @@ -31,29 +35,130 @@ class IdentityTestCase(unittest.HomeserverTestCase): def make_homeserver(self, reactor, clock): config = self.default_config() + config["trusted_third_party_id_servers"] = ["testis"] config["enable_3pid_lookup"] = False self.hs = self.setup_test_homeserver(config=config) return self.hs + def prepare(self, reactor, clock, hs): + self.user_id = self.register_user("kermit", "monkey") + self.tok = self.login("kermit", "monkey") + + def test_3pid_invite_disabled(self): + channel = self.make_request(b"POST", "/createRoom", {}, access_token=self.tok) + self.assertEquals(channel.result["code"], b"200", channel.result) + room_id = channel.json_body["room_id"] + + data = { + "id_server": "testis", + "medium": "email", + "address": "test@example.com", + } + request_url = ("/rooms/%s/invite" % (room_id)).encode("ascii") + channel = self.make_request(b"POST", request_url, data, access_token=self.tok) + self.assertEquals(channel.result["code"], b"403", channel.result) + def test_3pid_lookup_disabled(self): self.hs.config.registration.enable_3pid_lookup = False - self.register_user("kermit", "monkey") - tok = self.login("kermit", "monkey") + url = ( + "/_matrix/client/unstable/account/3pid/lookup" + "?id_server=testis&medium=email&address=foo@bar.baz" + ) + channel = self.make_request("GET", url, access_token=self.tok) + self.assertEqual(channel.result["code"], b"403", 
channel.result) + + def test_3pid_bulk_lookup_disabled(self): + url = "/_matrix/client/unstable/account/3pid/bulk_lookup" + data = { + "id_server": "testis", + "threepids": [["email", "foo@bar.baz"], ["email", "john.doe@matrix.org"]], + } + channel = self.make_request("POST", url, data, access_token=self.tok) + self.assertEqual(channel.result["code"], b"403", channel.result) + + +class IdentityEnabledTestCase(unittest.HomeserverTestCase): + """Tests that 3PID lookup attempts succeed when the HS's config allows them.""" + + servlets = [ + account.register_servlets, + synapse.rest.admin.register_servlets_for_client_rest_resource, + room.register_servlets, + login.register_servlets, + ] + + def make_homeserver(self, reactor, clock): + + config = self.default_config() + config["enable_3pid_lookup"] = True + config["trusted_third_party_id_servers"] = ["testis"] + + mock_http_client = Mock(spec=["get_json", "post_json_get_json"]) + mock_http_client.get_json.return_value = defer.succeed({"mxid": "@f:test"}) + mock_http_client.post_json_get_json.return_value = defer.succeed({}) + + self.hs = self.setup_test_homeserver( + config=config, simple_http_client=mock_http_client + ) + + # TODO: This class does not use a singleton to get it's http client + # This should be fixed for easier testing + # https://github.com/matrix-org/synapse-dinsic/issues/26 + self.hs.get_identity_handler().http_client = mock_http_client - channel = self.make_request(b"POST", "/createRoom", b"{}", access_token=tok) + return self.hs + + def prepare(self, reactor, clock, hs): + self.user_id = self.register_user("kermit", "monkey") + self.tok = self.login("kermit", "monkey") + + def test_3pid_invite_enabled(self): + channel = self.make_request( + b"POST", "/createRoom", b"{}", access_token=self.tok + ) self.assertEquals(channel.result["code"], b"200", channel.result) room_id = channel.json_body["room_id"] - params = { + data = { "id_server": "testis", "medium": "email", "address": "test@example.com", } - 
request_data = json.dumps(params) request_url = ("/rooms/%s/invite" % (room_id)).encode("ascii") - channel = self.make_request( - b"POST", request_url, request_data, access_token=tok + channel = self.make_request(b"POST", request_url, data, access_token=self.tok) + + get_json = self.hs.get_identity_handler().http_client.get_json + get_json.assert_called_once_with( + "https://testis/_matrix/identity/api/v1/lookup", + {"address": "test@example.com", "medium": "email"}, + ) + self.assertEquals(channel.result["code"], b"200", channel.result) + + def test_3pid_lookup_enabled(self): + url = ( + "/_matrix/client/unstable/account/3pid/lookup" + "?id_server=testis&medium=email&address=foo@bar.baz" + ) + self.make_request("GET", url, access_token=self.tok) + + get_json = self.hs.get_simple_http_client().get_json + get_json.assert_called_once_with( + "https://testis/_matrix/identity/api/v1/lookup", + {"address": "foo@bar.baz", "medium": "email"}, + ) + + def test_3pid_bulk_lookup_enabled(self): + url = "/_matrix/client/unstable/account/3pid/bulk_lookup" + data = { + "id_server": "testis", + "threepids": [["email", "foo@bar.baz"], ["email", "john.doe@matrix.org"]], + } + self.make_request("POST", url, data, access_token=self.tok) + + post_json = self.hs.get_simple_http_client().post_json_get_json + post_json.assert_called_once_with( + "https://testis/_matrix/identity/api/v1/bulk_lookup", + {"threepids": [["email", "foo@bar.baz"], ["email", "john.doe@matrix.org"]]}, ) - self.assertEquals(channel.result["code"], b"403", channel.result) diff --git a/tests/rest/client/test_register.py b/tests/rest/client/test_register.py index 6e7c0f11df..f1265a824d 100644 --- a/tests/rest/client/test_register.py +++ b/tests/rest/client/test_register.py @@ -16,14 +16,29 @@ import datetime import json import os +import os.path +import tempfile +from unittest.mock import Mock import pkg_resources +from twisted.internet import defer + import synapse.rest.admin from synapse.api.constants import 
APP_SERVICE_REGISTRATION_TYPE, LoginType from synapse.api.errors import Codes from synapse.appservice import ApplicationService -from synapse.rest.client import account, account_validity, login, logout, register, sync +from synapse.rest.client import ( + account, + account_validity, + login, + logout, + profile, + register, + room, + sync, + user_directory, +) from synapse.storage._base import db_to_json from tests import unittest @@ -107,13 +122,6 @@ class RegisterRestServletTestCase(unittest.HomeserverTestCase): self.assertEquals(channel.result["code"], b"400", channel.result) self.assertEquals(channel.json_body["error"], "Invalid password") - def test_POST_bad_username(self): - request_data = json.dumps({"username": 777, "password": "monkey"}) - channel = self.make_request(b"POST", self.url, request_data) - - self.assertEquals(channel.result["code"], b"400", channel.result) - self.assertEquals(channel.json_body["error"], "Invalid username") - def test_POST_user_valid(self): user_id = "@kermit:test" device_id = "frogfone" @@ -727,6 +735,96 @@ class RegisterRestServletTestCase(unittest.HomeserverTestCase): ) +class RegisterHideProfileTestCase(unittest.HomeserverTestCase): + + servlets = [synapse.rest.admin.register_servlets_for_client_rest_resource] + + def make_homeserver(self, reactor, clock): + + self.url = b"/_matrix/client/r0/register" + + config = self.default_config() + config["enable_registration"] = True + config["show_users_in_user_directory"] = False + config["replicate_user_profiles_to"] = ["fakeserver"] + + mock_http_client = Mock(spec=["get_json", "post_json_get_json"]) + mock_http_client.post_json_get_json.return_value = defer.succeed((200, "{}")) + + self.hs = self.setup_test_homeserver( + config=config, simple_http_client=mock_http_client + ) + + return self.hs + + def test_profile_hidden(self): + user_id = self.register_user("kermit", "monkey") + + post_json = self.hs.get_simple_http_client().post_json_get_json + + # We expect post_json_get_json 
to have been called twice: once with the original + # profile and once with the None profile resulting from the request to hide it + # from the user directory. + self.assertEqual(post_json.call_count, 2, post_json.call_args_list) + + # Get the args (and not kwargs) passed to post_json. + args = post_json.call_args[0] + # Make sure the last call was attempting to replicate profiles. + split_uri = args[0].split("/") + self.assertEqual(split_uri[len(split_uri) - 1], "replicate_profiles", args[0]) + # Make sure the last profile update was overriding the user's profile to None. + self.assertEqual(args[1]["batch"][user_id], None, args[1]) + + +class AccountValidityTemplateDirectoryTestCase(unittest.HomeserverTestCase): + def make_homeserver(self, reactor, clock): + config = self.default_config() + + # Create a custom template directory and a template inside to read + temp_dir = tempfile.mkdtemp() + self.account_renewed_fd, account_renewed_path = tempfile.mkstemp(dir=temp_dir) + self.invalid_token_fd, invalid_token_path = tempfile.mkstemp(dir=temp_dir) + + self.account_renewed_template_contents = "Yay, your account has been renewed" + self.invalid_token_template_contents = "Boo, you used an invalid token. Booo" + + # Add some content to the custom templates + with open(account_renewed_path, "w") as f: + f.write(self.account_renewed_template_contents) + + with open(invalid_token_path, "w") as f: + f.write(self.invalid_token_template_contents) + + # Write the config, specifying the custom template directory and name of the custom + # template files. They must be different than those that exist in the default + # template directory in order to properly test everything. 
+ config["enable_registration"] = True + config["account_validity"] = { + "enabled": True, + "period": 604800000, # Time in ms for 1 week + "template_dir": temp_dir, + "account_renewed_html_path": os.path.basename(account_renewed_path), + "invalid_token_html_path": os.path.basename(invalid_token_path), + } + self.hs = self.setup_test_homeserver(config=config) + + return self.hs + + def test_template_contents(self): + """Tests that the contents of the custom templates as specified in the config are + correct. + """ + self.assertEquals( + self.hs.config.account_validity.account_validity_account_renewed_template.render(), + self.account_renewed_template_contents, + ) + + self.assertEquals( + self.hs.config.account_validity.account_validity_invalid_token_template.render(), + self.invalid_token_template_contents, + ) + + class AccountValidityTestCase(unittest.HomeserverTestCase): servlets = [ @@ -736,6 +834,7 @@ class AccountValidityTestCase(unittest.HomeserverTestCase): sync.register_servlets, logout.register_servlets, account_validity.register_servlets, + account.register_servlets, ] def make_homeserver(self, reactor, clock): @@ -846,6 +945,146 @@ class AccountValidityTestCase(unittest.HomeserverTestCase): self.assertEquals(channel.result["code"], b"200", channel.result) +class AccountValidityUserDirectoryTestCase(unittest.HomeserverTestCase): + + servlets = [ + profile.register_servlets, + room.register_servlets, + user_directory.register_servlets, + login.register_servlets, + register.register_servlets, + synapse.rest.admin.register_servlets_for_client_rest_resource, + account_validity.register_servlets, + ] + + def make_homeserver(self, reactor, clock): + config = self.default_config() + + # Set accounts to expire after a week + config["enable_registration"] = True + config["account_validity"] = { + "enabled": True, + "period": 604800000, # Time in ms for 1 week + } + config["replicate_user_profiles_to"] = "test.is" + + # Mock homeserver requests to an identity 
server + mock_http_client = Mock(spec=["post_json_get_json"]) + mock_http_client.post_json_get_json.return_value = defer.succeed((200, "{}")) + + self.hs = self.setup_test_homeserver( + config=config, simple_http_client=mock_http_client + ) + + return self.hs + + def test_expired_user_in_directory(self): + """Test that an expired user is hidden in the user directory""" + # Create an admin user to search the user directory + admin_id = self.register_user("admin", "adminpassword", admin=True) + admin_tok = self.login("admin", "adminpassword") + + # Ensure the admin never expires + url = "/_synapse/admin/v1/account_validity/validity" + params = { + "user_id": admin_id, + "expiration_ts": 999999999999, + "enable_renewal_emails": False, + } + request_data = json.dumps(params) + channel = self.make_request(b"POST", url, request_data, access_token=admin_tok) + self.assertEquals(channel.result["code"], b"200", channel.result) + + # Mock the homeserver's HTTP client + post_json = self.hs.get_simple_http_client().post_json_get_json + + # Create a user + username = "kermit" + user_id = self.register_user(username, "monkey") + self.login(username, "monkey") + self.get_success( + self.hs.get_datastore().set_profile_displayname(username, "mr.kermit", 1) + ) + + # Check that a full profile for this user is replicated + self.assertIsNotNone(post_json.call_args, post_json.call_args) + payload = post_json.call_args[0][1] + batch = payload.get("batch") + + self.assertIsNotNone(batch, batch) + self.assertEquals(len(batch), 1, batch) + + replicated_user_id = list(batch.keys())[0] + self.assertEquals(replicated_user_id, user_id, replicated_user_id) + + # There was replicated information about our user + # Check that it's not None + replicated_content = batch[user_id] + self.assertIsNotNone(replicated_content) + + # Expire the user + url = "/_synapse/admin/v1/account_validity/validity" + params = { + "user_id": user_id, + "expiration_ts": 0, + "enable_renewal_emails": False, + } + 
request_data = json.dumps(params) + channel = self.make_request(b"POST", url, request_data, access_token=admin_tok) + self.assertEquals(channel.result["code"], b"200", channel.result) + + # Wait for the background job to run which hides expired users in the directory + self.reactor.advance(60 * 60 * 1000) + + # Check if the homeserver has replicated the user's profile to the identity server + self.assertIsNotNone(post_json.call_args, post_json.call_args) + payload = post_json.call_args[0][1] + batch = payload.get("batch") + + self.assertIsNotNone(batch, batch) + self.assertEquals(len(batch), 1, batch) + + replicated_user_id = list(batch.keys())[0] + self.assertEquals(replicated_user_id, user_id, replicated_user_id) + + # There was replicated information about our user + # Check that it's None, signifying that the user should be removed from the user + # directory because they were expired + replicated_content = batch[user_id] + self.assertIsNone(replicated_content) + + # Now renew the user, and check they get replicated again to the identity server + url = "/_synapse/admin/v1/account_validity/validity" + params = { + "user_id": user_id, + "expiration_ts": 99999999999, + "enable_renewal_emails": False, + } + request_data = json.dumps(params) + channel = self.make_request(b"POST", url, request_data, access_token=admin_tok) + self.assertEquals(channel.result["code"], b"200", channel.result) + + self.pump(10) + self.reactor.advance(10) + self.pump() + + # Check if the homeserver has replicated the user's profile to the identity server + post_json = self.hs.get_simple_http_client().post_json_get_json + self.assertNotEquals(post_json.call_args, None, post_json.call_args) + payload = post_json.call_args[0][1] + batch = payload.get("batch") + self.assertNotEquals(batch, None, batch) + self.assertEquals(len(batch), 1, batch) + replicated_user_id = list(batch.keys())[0] + self.assertEquals(replicated_user_id, user_id, replicated_user_id) + + # There was replicated 
information about our user + # Check that it's not None, signifying that the user is back in the user + # directory + replicated_content = batch[user_id] + self.assertIsNotNone(replicated_content) + + class AccountValidityRenewalByEmailTestCase(unittest.HomeserverTestCase): servlets = [ @@ -1100,8 +1339,6 @@ class AccountValidityBackgroundJobTestCase(unittest.HomeserverTestCase): """ user_id = self.register_user("kermit_delta", "user") - self.hs.config.account_validity.startup_job_max_delta = self.max_delta - now_ms = self.hs.get_clock().time_msec() self.get_success(self.store._set_expiration_date_when_missing()) diff --git a/tests/rest/client/test_retention.py b/tests/rest/client/test_retention.py index b58452195a..5c3bef645f 100644 --- a/tests/rest/client/test_retention.py +++ b/tests/rest/client/test_retention.py @@ -33,6 +33,7 @@ class RetentionTestCase(unittest.HomeserverTestCase): def make_homeserver(self, reactor, clock): config = self.default_config() + config["default_room_version"] = "1" config["retention"] = { "enabled": True, "default_policy": { @@ -242,6 +243,7 @@ class RetentionNoDefaultPolicyTestCase(unittest.HomeserverTestCase): def make_homeserver(self, reactor, clock): config = self.default_config() + config["default_room_version"] = "1" config["retention"] = { "enabled": True, } diff --git a/tests/storage/test_main.py b/tests/storage/test_main.py index d2b7b89952..3dcf6a22e5 100644 --- a/tests/storage/test_main.py +++ b/tests/storage/test_main.py @@ -38,7 +38,7 @@ class DataStoreTestCase(unittest.TestCase): ) yield defer.ensureDeferred(self.store.create_profile(self.user.localpart)) yield defer.ensureDeferred( - self.store.set_profile_displayname(self.user.localpart, self.displayname) + self.store.set_profile_displayname(self.user.localpart, self.displayname, 1) ) users, total = yield defer.ensureDeferred( diff --git a/tests/storage/test_profile.py b/tests/storage/test_profile.py index d37736edf8..bae4f6c224 100644 --- 
a/tests/storage/test_profile.py +++ b/tests/storage/test_profile.py @@ -30,7 +30,7 @@ class ProfileStoreTestCase(unittest.HomeserverTestCase): self.get_success(self.store.create_profile(self.u_frank.localpart)) self.get_success( - self.store.set_profile_displayname(self.u_frank.localpart, "Frank") + self.store.set_profile_displayname(self.u_frank.localpart, "Frank", 1) ) self.assertEquals( @@ -44,7 +44,7 @@ class ProfileStoreTestCase(unittest.HomeserverTestCase): # test set to None self.get_success( - self.store.set_profile_displayname(self.u_frank.localpart, None) + self.store.set_profile_displayname(self.u_frank.localpart, None, 1) ) self.assertIsNone( @@ -56,7 +56,7 @@ class ProfileStoreTestCase(unittest.HomeserverTestCase): self.get_success( self.store.set_profile_avatar_url( - self.u_frank.localpart, "http://my.site/here" + self.u_frank.localpart, "http://my.site/here", 1 ) ) @@ -71,7 +71,7 @@ class ProfileStoreTestCase(unittest.HomeserverTestCase): # test set to None self.get_success( - self.store.set_profile_avatar_url(self.u_frank.localpart, None) + self.store.set_profile_avatar_url(self.u_frank.localpart, None, 1) ) self.assertIsNone( diff --git a/tests/test_types.py b/tests/test_types.py index 0d0c00d97a..77d12fad4c 100644 --- a/tests/test_types.py +++ b/tests/test_types.py @@ -11,9 +11,16 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
+from six import string_types from synapse.api.errors import SynapseError -from synapse.types import GroupID, RoomAlias, UserID, map_username_to_mxid_localpart +from synapse.types import ( + GroupID, + RoomAlias, + UserID, + map_username_to_mxid_localpart, + strip_invalid_mxid_characters, +) from tests import unittest @@ -104,3 +111,16 @@ class MapUsernameTestCase(unittest.TestCase): # this should work with either a unicode or a bytes self.assertEqual(map_username_to_mxid_localpart("têst"), "t=c3=aast") self.assertEqual(map_username_to_mxid_localpart("têst".encode()), "t=c3=aast") + + +class StripInvalidMxidCharactersTestCase(unittest.TestCase): + def test_return_type(self): + unstripped = strip_invalid_mxid_characters("test") + stripped = strip_invalid_mxid_characters("test@") + + self.assertTrue(isinstance(unstripped, string_types), type(unstripped)) + self.assertTrue(isinstance(stripped, string_types), type(stripped)) + + def test_strip(self): + stripped = strip_invalid_mxid_characters("test@") + self.assertEqual(stripped, "test", stripped) diff --git a/tests/utils.py b/tests/utils.py index 983859120f..a67d9595e3 100644 --- a/tests/utils.py +++ b/tests/utils.py @@ -172,6 +172,8 @@ def default_config(name, parse=False): "update_user_directory": False, "caches": {"global_factor": 1}, "listeners": [{"port": 0, "type": "http"}], + # Enable encryption by default in private rooms + "encryption_enabled_by_default_for_room_type": "invite", } if parse: diff --git a/tox.ini b/tox.ini index cfe6a06942..be00f7914d 100644 --- a/tox.ini +++ b/tox.ini @@ -166,7 +166,7 @@ skip_install = true usedevelop = false deps = towncrier>=18.6.0rc1 commands = - python -m towncrier.check --compare-with=origin/develop + python -m towncrier.check --compare-with=origin/dinsic [testenv:check-sampleconfig] commands = {toxinidir}/scripts-dev/generate_sample_config --check |