-rw-r--r--  .buildkite/docker-compose.py27.pg94.yaml | 21
-rw-r--r--  .buildkite/docker-compose.py27.pg95.yaml | 21
-rw-r--r--  .buildkite/format_tap.py | 33
-rwxr-xr-x  .buildkite/merge_base_branch.sh (renamed from .circleci/merge_base_branch.sh) | 17
-rw-r--r--  .buildkite/pipeline.yml | 93
-rw-r--r--  .buildkite/synapse_sytest.sh | 145
-rw-r--r--  .circleci/config.yml | 137
-rw-r--r--  .github/FUNDING.yml | 4
-rw-r--r--  changelog.d/5252.feature | 1
-rw-r--r--  changelog.d/5325.bugfix | 1
-rw-r--r--  changelog.d/5363.feature | 1
-rw-r--r--  changelog.d/5378.misc | 1
-rw-r--r--  changelog.d/5381.misc | 1
-rw-r--r--  changelog.d/5382.misc | 1
-rw-r--r--  changelog.d/5383.misc | 1
-rw-r--r--  changelog.d/5384.feature | 1
-rw-r--r--  changelog.d/5386.misc | 1
-rw-r--r--  changelog.d/5387.bugfix | 1
-rw-r--r--  changelog.d/5388.bugfix | 1
-rw-r--r--  changelog.d/5389.bugfix | 1
-rw-r--r--  changelog.d/5390.bugfix | 1
-rw-r--r--  changelog.d/5394.bugfix | 1
-rw-r--r--  changelog.d/5412.feature | 1
-rw-r--r--  changelog.d/5425.removal | 1
-rw-r--r--  changelog.d/5440.feature | 1
-rw-r--r--  changelog.d/5447.misc | 1
-rw-r--r--  changelog.d/5458.feature | 1
-rw-r--r--  changelog.d/5459.misc | 1
-rw-r--r--  changelog.d/5460.misc | 1
-rw-r--r--  changelog.d/5461.feature | 1
-rw-r--r--  changelog.d/5464.bugfix | 1
-rw-r--r--  changelog.d/5465.misc | 2
-rwxr-xr-x  demo/start.sh | 4
-rw-r--r--  docker/README.md | 2
-rw-r--r--  docs/sample_config.yaml | 13
-rwxr-xr-x  scripts-dev/federation_client.py | 43
-rwxr-xr-x  scripts/generate_signing_key.py | 2
-rwxr-xr-x  scripts/synapse_port_db | 1
-rwxr-xr-x  setup.py | 10
-rw-r--r--  synapse/__init__.py | 7
-rw-r--r--  synapse/api/auth.py | 15
-rwxr-xr-x  synapse/app/homeserver.py | 1
-rw-r--r--  synapse/config/emailconfig.py | 11
-rw-r--r--  synapse/config/homeserver.py | 2
-rw-r--r--  synapse/config/third_party_event_rules.py | 42
-rw-r--r--  synapse/events/third_party_rules.py | 62
-rw-r--r--  synapse/federation/sender/per_destination_queue.py | 40
-rw-r--r--  synapse/groups/attestations.py | 11
-rw-r--r--  synapse/handlers/account_validity.py | 3
-rw-r--r--  synapse/handlers/deactivate_account.py | 10
-rw-r--r--  synapse/handlers/federation.py | 74
-rw-r--r--  synapse/handlers/groups_local.py | 4
-rw-r--r--  synapse/handlers/message.py | 14
-rw-r--r--  synapse/handlers/profile.py | 29
-rw-r--r--  synapse/http/client.py | 13
-rw-r--r--  synapse/metrics/__init__.py | 111
-rw-r--r--  synapse/push/emailpusher.py | 19
-rw-r--r--  synapse/push/presentable_names.py | 11
-rw-r--r--  synapse/push/pusherpool.py | 30
-rw-r--r--  synapse/python_dependencies.py | 4
-rw-r--r--  synapse/replication/http/_base.py | 10
-rw-r--r--  synapse/rest/client/v2_alpha/account.py | 19
-rw-r--r--  synapse/rest/client/v2_alpha/account_validity.py | 2
-rw-r--r--  synapse/rest/media/v1/media_repository.py | 6
-rw-r--r--  synapse/server.py | 7
-rw-r--r--  synapse/storage/__init__.py | 44
-rw-r--r--  synapse/storage/_base.py | 4
-rw-r--r--  synapse/storage/events.py | 45
-rw-r--r--  synapse/storage/registration.py | 130
-rw-r--r--  synapse/storage/schema/delta/55/users_alter_deactivated.sql | 19
-rwxr-xr-x  synctl | 19
-rw-r--r--  tests/push/test_email.py | 93
-rw-r--r--  tests/rest/client/third_party_rules.py | 79
-rw-r--r--  tests/rest/client/v2_alpha/test_account.py | 45
-rw-r--r--  tests/rest/client/v2_alpha/test_register.py | 82
-rw-r--r--  tests/storage/test_cleanup_extrems.py | 128
-rw-r--r--  tests/storage/test_event_metrics.py | 82
-rw-r--r--  tests/unittest.py | 61
-rw-r--r--  tox.ini | 4
79 files changed, 1477 insertions, 486 deletions
diff --git a/.buildkite/docker-compose.py27.pg94.yaml b/.buildkite/docker-compose.py27.pg94.yaml
deleted file mode 100644

index 2d4b9eadd9..0000000000
--- a/.buildkite/docker-compose.py27.pg94.yaml
+++ /dev/null
@@ -1,21 +0,0 @@
-version: '3.1'
-
-services:
-
-  postgres:
-    image: postgres:9.4
-    environment:
-      POSTGRES_PASSWORD: postgres
-
-  testenv:
-    image: python:2.7
-    depends_on:
-      - postgres
-    env_file: .env
-    environment:
-      SYNAPSE_POSTGRES_HOST: postgres
-      SYNAPSE_POSTGRES_USER: postgres
-      SYNAPSE_POSTGRES_PASSWORD: postgres
-    working_dir: /app
-    volumes:
-      - ..:/app
diff --git a/.buildkite/docker-compose.py27.pg95.yaml b/.buildkite/docker-compose.py27.pg95.yaml
deleted file mode 100644
index c6a41f1da0..0000000000
--- a/.buildkite/docker-compose.py27.pg95.yaml
+++ /dev/null
@@ -1,21 +0,0 @@
-version: '3.1'
-
-services:
-
-  postgres:
-    image: postgres:9.5
-    environment:
-      POSTGRES_PASSWORD: postgres
-
-  testenv:
-    image: python:2.7
-    depends_on:
-      - postgres
-    env_file: .env
-    environment:
-      SYNAPSE_POSTGRES_HOST: postgres
-      SYNAPSE_POSTGRES_USER: postgres
-      SYNAPSE_POSTGRES_PASSWORD: postgres
-    working_dir: /app
-    volumes:
-      - ..:/app
diff --git a/.buildkite/format_tap.py b/.buildkite/format_tap.py
new file mode 100644
index 0000000000..94582f5571
--- /dev/null
+++ b/.buildkite/format_tap.py
@@ -0,0 +1,33 @@
+import sys
+from tap.parser import Parser
+from tap.line import Result, Unknown, Diagnostic
+
+out = ["### TAP Output for " + sys.argv[2]]
+
+p = Parser()
+
+in_error = False
+
+for line in p.parse_file(sys.argv[1]):
+    if isinstance(line, Result):
+        if in_error:
+            out.append("")
+            out.append("</pre></code></details>")
+            out.append("")
+            out.append("----")
+            out.append("")
+        in_error = False
+
+        if not line.ok and not line.todo:
+            in_error = True
+
+            out.append("FAILURE Test #%d: ``%s``" % (line.number, line.description))
+            out.append("")
+            out.append("<details><summary>Show log</summary><code><pre>")
+
+    elif isinstance(line, Diagnostic) and in_error:
+        out.append(line.text)
+
+if out:
+    for line in out[:-3]:
+        print(line)
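
format_tap.py takes two positional arguments -- the path to a TAP results file and a label for the annotation heading -- and prints markdown suitable for a Buildkite annotation. It is invoked at the end of the new .buildkite/synapse_sytest.sh below, piped into buildkite-agent:

    /venv/bin/python .buildkite/format_tap.py /logs/results.tap "$BUILDKITE_LABEL" | ./buildkite-agent annotate --style="error" --context="$BUILDKITE_LABEL"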
diff --git a/.circleci/merge_base_branch.sh b/.buildkite/merge_base_branch.sh
index 4c19fa70d7..26176d6465 100755
--- a/.circleci/merge_base_branch.sh
+++ b/.buildkite/merge_base_branch.sh
@@ -1,22 +1,21 @@
 #!/usr/bin/env bash

-set -e
+set -ex

-# CircleCI doesn't give CIRCLE_PR_NUMBER in the environment for non-forked PRs. Wonderful.
-# In this case, we just need to do some ~shell magic~ to strip it out of the PULL_REQUEST URL.
-echo 'export CIRCLE_PR_NUMBER="${CIRCLE_PR_NUMBER:-${CIRCLE_PULL_REQUEST##*/}}"' >> $BASH_ENV
-source $BASH_ENV
+if [[ "$BUILDKITE_BRANCH" =~ ^(develop|master|dinsic|shhs|release-.*)$ ]]; then
+    echo "Not merging forward, as this is a release branch"
+    exit 0
+fi

-if [[ -z "${CIRCLE_PR_NUMBER}" ]]
-then
-    echo "Can't figure out what the PR number is! Assuming merge target is develop."
+if [[ -z $BUILDKITE_PULL_REQUEST_BASE_BRANCH ]]; then
+    echo "Not a pull request, or hasn't had a PR opened yet..."

     # It probably hasn't had a PR opened yet. Since all PRs land on develop, we
     # can probably assume it's based on it and will be merged into it.
     GITBASE="develop"
 else
     # Get the reference, using the GitHub API
-    GITBASE=`wget -O- https://api.github.com/repos/matrix-org/synapse/pulls/${CIRCLE_PR_NUMBER} | jq -r '.base.ref'`
+    GITBASE=$BUILDKITE_PULL_REQUEST_BASE_BRANCH
 fi

 # Show what we are before
diff --git a/.buildkite/pipeline.yml b/.buildkite/pipeline.yml
index 719f22b4e1..6c6229a205 100644
--- a/.buildkite/pipeline.yml
+++ b/.buildkite/pipeline.yml
@@ -2,6 +2,7 @@ env:
   CODECOV_TOKEN: "2dd7eb9b-0eda-45fe-a47c-9b5ac040045f"

 steps:
+
   - command:
       - "python -m pip install tox"
       - "tox -e pep8"
@@ -46,15 +47,16 @@ steps:
   - wait

+
   - command:
       - "python -m pip install tox"
-      - "tox -e py27,codecov"
-    label: ":python: 2.7 / SQLite"
+      - "tox -e py35-old,codecov"
+    label: ":python: 3.5 / SQLite / Old Deps"
     env:
       TRIAL_FLAGS: "-j 2"
     plugins:
       - docker#v3.0.1:
-          image: "python:2.7"
+          image: "python:3.5"
           propagate-environment: true
     retry:
       automatic:
@@ -114,16 +116,16 @@ steps:
         - exit_status: 2
           limit: 2

-  - command:
-      - "python -m pip install tox"
-      - "tox -e py27-old,codecov"
-    label: ":python: 2.7 / SQLite / Old Deps"
+  - label: ":python: 3.5 / :postgres: 9.4"
     env:
-      TRIAL_FLAGS: "-j 2"
+      TRIAL_FLAGS: "-j 4"
+    command:
+      - "bash -c 'python -m pip install tox && python -m tox -e py35-postgres,codecov'"
     plugins:
-      - docker#v3.0.1:
-          image: "python:2.7"
-          propagate-environment: true
+      - docker-compose#v2.1.0:
+          run: testenv
+          config:
+            - .buildkite/docker-compose.py35.pg94.yaml
     retry:
       automatic:
         - exit_status: -1
@@ -131,16 +133,16 @@ steps:
         - exit_status: 2
           limit: 2

-  - label: ":python: 2.7 / :postgres: 9.4"
+  - label: ":python: 3.5 / :postgres: 9.5"
     env:
       TRIAL_FLAGS: "-j 4"
     command:
-      - "bash -c 'python -m pip install tox && python -m tox -e py27-postgres,codecov'"
+      - "bash -c 'python -m pip install tox && python -m tox -e py35-postgres,codecov'"
     plugins:
       - docker-compose#v2.1.0:
           run: testenv
           config:
-            - .buildkite/docker-compose.py27.pg94.yaml
+            - .buildkite/docker-compose.py35.pg95.yaml
     retry:
       automatic:
         - exit_status: -1
@@ -148,16 +150,16 @@ steps:
         - exit_status: 2
           limit: 2

-  - label: ":python: 2.7 / :postgres: 9.5"
+  - label: ":python: 3.7 / :postgres: 9.5"
     env:
       TRIAL_FLAGS: "-j 4"
     command:
-      - "bash -c 'python -m pip install tox && python -m tox -e py27-postgres,codecov'"
+      - "bash -c 'python -m pip install tox && python -m tox -e py37-postgres,codecov'"
     plugins:
       - docker-compose#v2.1.0:
           run: testenv
           config:
-            - .buildkite/docker-compose.py27.pg95.yaml
+            - .buildkite/docker-compose.py37.pg95.yaml
     retry:
       automatic:
         - exit_status: -1
@@ -165,16 +167,16 @@ steps:
         - exit_status: 2
           limit: 2

-  - label: ":python: 3.5 / :postgres: 9.4"
+  - label: ":python: 3.7 / :postgres: 11"
     env:
       TRIAL_FLAGS: "-j 4"
     command:
-      - "bash -c 'python -m pip install tox && python -m tox -e py35-postgres,codecov'"
+      - "bash -c 'python -m pip install tox && python -m tox -e py37-postgres,codecov'"
     plugins:
       - docker-compose#v2.1.0:
           run: testenv
           config:
-            - .buildkite/docker-compose.py35.pg94.yaml
+            - .buildkite/docker-compose.py37.pg11.yaml
     retry:
       automatic:
         - exit_status: -1
@@ -182,16 +184,17 @@ steps:
         - exit_status: 2
           limit: 2

-  - label: ":python: 3.5 / :postgres: 9.5"
-    env:
-      TRIAL_FLAGS: "-j 4"
+
+  - label: "SyTest - :python: 3.5 / SQLite / Monolith"
+    agents:
+      queue: "medium"
     command:
-      - "bash -c 'python -m pip install tox && python -m tox -e py35-postgres,codecov'"
+      - "bash .buildkite/merge_base_branch.sh"
+      - "bash .buildkite/synapse_sytest.sh"
     plugins:
-      - docker-compose#v2.1.0:
-          run: testenv
-          config:
-            - .buildkite/docker-compose.py35.pg95.yaml
+      - docker#v3.0.1:
+          image: "matrixdotorg/sytest-synapse:py35"
+          propagate-environment: true
     retry:
       automatic:
         - exit_status: -1
@@ -199,16 +202,18 @@ steps:
         - exit_status: 2
           limit: 2

-  - label: ":python: 3.7 / :postgres: 9.5"
+  - label: "SyTest - :python: 3.5 / :postgres: 9.6 / Monolith"
+    agents:
+      queue: "medium"
     env:
-      TRIAL_FLAGS: "-j 4"
+      POSTGRES: "1"
     command:
-      - "bash -c 'python -m pip install tox && python -m tox -e py37-postgres,codecov'"
+      - "bash .buildkite/merge_base_branch.sh"
+      - "bash .buildkite/synapse_sytest.sh"
     plugins:
-      - docker-compose#v2.1.0:
-          run: testenv
-          config:
-            - .buildkite/docker-compose.py37.pg95.yaml
+      - docker#v3.0.1:
+          image: "matrixdotorg/sytest-synapse:py35"
+          propagate-environment: true
     retry:
       automatic:
         - exit_status: -1
@@ -216,16 +221,20 @@ steps:
         - exit_status: 2
           limit: 2

-  - label: ":python: 3.7 / :postgres: 11"
+  - label: "SyTest - :python: 3.5 / :postgres: 9.6 / Workers"
+    agents:
+      queue: "medium"
     env:
-      TRIAL_FLAGS: "-j 4"
+      POSTGRES: "1"
+      WORKERS: "1"
     command:
-      - "bash -c 'python -m pip install tox && python -m tox -e py37-postgres,codecov'"
+      - "bash .buildkite/merge_base_branch.sh"
+      - "bash .buildkite/synapse_sytest.sh"
     plugins:
-      - docker-compose#v2.1.0:
-          run: testenv
-          config:
-            - .buildkite/docker-compose.py37.pg11.yaml
+      - docker#v3.0.1:
+          image: "matrixdotorg/sytest-synapse:py35"
+          propagate-environment: true
+    soft_fail: true
     retry:
       automatic:
         - exit_status: -1
diff --git a/.buildkite/synapse_sytest.sh b/.buildkite/synapse_sytest.sh
new file mode 100644
index 0000000000..3011b88bb7
--- /dev/null
+++ b/.buildkite/synapse_sytest.sh
@@ -0,0 +1,145 @@
+#!/bin/bash
+#
+# Fetch sytest, and then run the tests for synapse. The entrypoint for the
+# sytest-synapse docker images.
+
+set -ex
+
+if [ -n "$BUILDKITE" ]
+then
+    SYNAPSE_DIR=`pwd`
+else
+    SYNAPSE_DIR="/src"
+fi
+
+# Attempt to find a sytest to use.
+# If /sytest exists, it means that a SyTest checkout has been mounted into the Docker image.
+if [ -d "/sytest" ]; then
+    # If the user has mounted in a SyTest checkout, use that.
+    echo "Using local sytests..."
+
+    # create ourselves a working directory and dos2unix some scripts therein
+    mkdir -p /work/jenkins
+    for i in install-deps.pl run-tests.pl tap-to-junit-xml.pl jenkins/prep_sytest_for_postgres.sh; do
+        dos2unix -n "/sytest/$i" "/work/$i"
+    done
+    ln -sf /sytest/tests /work
+    ln -sf /sytest/keys /work
+    SYTEST_LIB="/sytest/lib"
+else
+    if [ -n "$BUILDKITE_BRANCH" ]
+    then
+        branch_name=$BUILDKITE_BRANCH
+    else
+        # Otherwise, try and find out what the branch that the Synapse checkout is using. Fall back to develop if it's not a branch.
+        branch_name="$(git --git-dir=/src/.git symbolic-ref HEAD 2>/dev/null)" || branch_name="develop"
+    fi
+
+    # Try and fetch the branch
+    echo "Trying to get same-named sytest branch..."
+    wget -q https://github.com/matrix-org/sytest/archive/$branch_name.tar.gz -O sytest.tar.gz || {
+        # Probably a 404, fall back to develop
+        echo "Using develop instead..."
+        wget -q https://github.com/matrix-org/sytest/archive/develop.tar.gz -O sytest.tar.gz
+    }
+
+    mkdir -p /work
+    tar -C /work --strip-components=1 -xf sytest.tar.gz
+    SYTEST_LIB="/work/lib"
+fi
+
+cd /work
+
+# PostgreSQL setup
+if [ -n "$POSTGRES" ]
+then
+    export PGUSER=postgres
+    export POSTGRES_DB_1=pg1
+    export POSTGRES_DB_2=pg2
+
+    # Start the database
+    su -c 'eatmydata /usr/lib/postgresql/9.6/bin/pg_ctl -w -D /var/lib/postgresql/data start' postgres
+
+    # Use the Jenkins script to write out the configuration for a PostgreSQL using Synapse
+    jenkins/prep_sytest_for_postgres.sh
+
+    # Make the test databases for the two Synapse servers that will be spun up
+    su -c 'psql -c "CREATE DATABASE pg1;"' postgres
+    su -c 'psql -c "CREATE DATABASE pg2;"' postgres
+
+fi
+
+if [ -n "$OFFLINE" ]; then
+    # if we're in offline mode, just put synapse into the virtualenv, and
+    # hope that the deps are up-to-date.
+    #
+    # (`pip install -e` likes to reinstall setuptools even if it's already installed,
+    # so we just run setup.py explicitly.)
+    #
+    (cd $SYNAPSE_DIR && /venv/bin/python setup.py -q develop)
+else
+    # We've already created the virtualenv, but lets double check we have all
+    # deps.
+    /venv/bin/pip install -q --upgrade --no-cache-dir -e $SYNAPSE_DIR
+    /venv/bin/pip install -q --upgrade --no-cache-dir \
+        lxml psycopg2 coverage codecov tap.py
+
+    # Make sure all Perl deps are installed -- this is done in the docker build
+    # so will only install packages added since the last Docker build
+    ./install-deps.pl
+fi
+
+
+# Run the tests
+>&2 echo "+++ Running tests"
+
+RUN_TESTS=(
+    perl -I "$SYTEST_LIB" ./run-tests.pl --python=/venv/bin/python --synapse-directory=$SYNAPSE_DIR --coverage -O tap --all
+)
+
+TEST_STATUS=0
+
+if [ -n "$WORKERS" ]; then
+    RUN_TESTS+=(-I Synapse::ViaHaproxy --dendron-binary=/pydron.py)
+else
+    RUN_TESTS+=(-I Synapse)
+fi
+
+"${RUN_TESTS[@]}" "$@" > results.tap || TEST_STATUS=$?
+
+if [ $TEST_STATUS -ne 0 ]; then
+    >&2 echo -e "run-tests \e[31mFAILED\e[0m: exit code $TEST_STATUS"
+else
+    >&2 echo -e "run-tests \e[32mPASSED\e[0m"
+fi
+
+>&2 echo "--- Copying assets"
+
+# Copy out the logs
+mkdir -p /logs
+cp results.tap /logs/results.tap
+rsync --ignore-missing-args --min-size=1B -av server-0 server-1 /logs --include "*/" --include="*.log.*" --include="*.log" --exclude="*"
+
+# Upload coverage to codecov and upload files, if running on Buildkite
+if [ -n "$BUILDKITE" ]
+then
+    /venv/bin/coverage combine || true
+    /venv/bin/coverage xml || true
+    /venv/bin/codecov -X gcov -f coverage.xml
+
+    wget -O buildkite.tar.gz https://github.com/buildkite/agent/releases/download/v3.13.0/buildkite-agent-linux-amd64-3.13.0.tar.gz
+    tar xvf buildkite.tar.gz
+    chmod +x ./buildkite-agent
+
+    # Upload the files
+    ./buildkite-agent artifact upload "/logs/**/*.log*"
+    ./buildkite-agent artifact upload "/logs/results.tap"
+
+    if [ $TEST_STATUS -ne 0 ]; then
+        # Annotate, if failure
+        /venv/bin/python $SYNAPSE_DIR/.buildkite/format_tap.py /logs/results.tap "$BUILDKITE_LABEL" | ./buildkite-agent annotate --style="error" --context="$BUILDKITE_LABEL"
+    fi
+fi
+
+
+exit $TEST_STATUS
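
Because the script prefers a SyTest checkout mounted at /sytest over fetching a tarball, it can also be run outside CI. A minimal sketch, assuming local checkouts and the py35 image tag used in the pipeline (per the script's header comment, it is the image's entrypoint):

    docker run --rm -v /path/to/synapse:/src -v /path/to/sytest:/sytest \
        matrixdotorg/sytest-synapse:py35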
diff --git a/.circleci/config.yml b/.circleci/config.yml
index 137747dae3..e4fd5ffa6b 100644
--- a/.circleci/config.yml
+++ b/.circleci/config.yml
@@ -4,160 +4,23 @@ jobs:
     machine: true
     steps:
       - checkout
-      - run: docker build -f docker/Dockerfile --label gitsha1=${CIRCLE_SHA1} -t matrixdotorg/synapse:${CIRCLE_TAG}-py2 .
       - run: docker build -f docker/Dockerfile --label gitsha1=${CIRCLE_SHA1} -t matrixdotorg/synapse:${CIRCLE_TAG} -t matrixdotorg/synapse:${CIRCLE_TAG}-py3 --build-arg PYTHON_VERSION=3.6 .
       - run: docker login --username $DOCKER_HUB_USERNAME --password $DOCKER_HUB_PASSWORD
       - run: docker push matrixdotorg/synapse:${CIRCLE_TAG}
-      - run: docker push matrixdotorg/synapse:${CIRCLE_TAG}-py2
       - run: docker push matrixdotorg/synapse:${CIRCLE_TAG}-py3
   dockerhubuploadlatest:
     machine: true
     steps:
       - checkout
-      - run: docker build -f docker/Dockerfile --label gitsha1=${CIRCLE_SHA1} -t matrixdotorg/synapse:latest-py2 .
       - run: docker build -f docker/Dockerfile --label gitsha1=${CIRCLE_SHA1} -t matrixdotorg/synapse:latest -t matrixdotorg/synapse:latest-py3 --build-arg PYTHON_VERSION=3.6 .
       - run: docker login --username $DOCKER_HUB_USERNAME --password $DOCKER_HUB_PASSWORD
       - run: docker push matrixdotorg/synapse:latest
-      - run: docker push matrixdotorg/synapse:latest-py2
       - run: docker push matrixdotorg/synapse:latest-py3
-  sytestpy2:
-    docker:
-      - image: matrixdotorg/sytest-synapsepy2
-    working_directory: /src
-    steps:
-      - checkout
-      - run: /synapse_sytest.sh
-      - store_artifacts:
-          path: /logs
-          destination: logs
-      - store_test_results:
-          path: /logs
-  sytestpy2postgres:
-    docker:
-      - image: matrixdotorg/sytest-synapsepy2
-    working_directory: /src
-    steps:
-      - checkout
-      - run: POSTGRES=1 /synapse_sytest.sh
-      - store_artifacts:
-          path: /logs
-          destination: logs
-      - store_test_results:
-          path: /logs
-  sytestpy2merged:
-    docker:
-      - image: matrixdotorg/sytest-synapsepy2
-    working_directory: /src
-    steps:
-      - checkout
-      - run: bash .circleci/merge_base_branch.sh
-      - run: /synapse_sytest.sh
-      - store_artifacts:
-          path: /logs
-          destination: logs
-      - store_test_results:
-          path: /logs
-  sytestpy2postgresmerged:
-    docker:
-      - image: matrixdotorg/sytest-synapsepy2
-    working_directory: /src
-    steps:
-      - checkout
-      - run: bash .circleci/merge_base_branch.sh
-      - run: POSTGRES=1 /synapse_sytest.sh
-      - store_artifacts:
-          path: /logs
-          destination: logs
-      - store_test_results:
-          path: /logs
-
-  sytestpy3:
-    docker:
-      - image: matrixdotorg/sytest-synapsepy3
-    working_directory: /src
-    steps:
-      - checkout
-      - run: /synapse_sytest.sh
-      - store_artifacts:
-          path: /logs
-          destination: logs
-      - store_test_results:
-          path: /logs
-  sytestpy3postgres:
-    docker:
-      - image: matrixdotorg/sytest-synapsepy3
-    working_directory: /src
-    steps:
-      - checkout
-      - run: POSTGRES=1 /synapse_sytest.sh
-      - store_artifacts:
-          path: /logs
-          destination: logs
-      - store_test_results:
-          path: /logs
-  sytestpy3merged:
-    docker:
-      - image: matrixdotorg/sytest-synapsepy3
-    working_directory: /src
-    steps:
-      - checkout
-      - run: bash .circleci/merge_base_branch.sh
-      - run: /synapse_sytest.sh
-      - store_artifacts:
-          path: /logs
-          destination: logs
-      - store_test_results:
-          path: /logs
-  sytestpy3postgresmerged:
-    docker:
-      - image: matrixdotorg/sytest-synapsepy3
-    working_directory: /src
-    steps:
-      - checkout
-      - run: bash .circleci/merge_base_branch.sh
-      - run: POSTGRES=1 /synapse_sytest.sh
-      - store_artifacts:
-          path: /logs
-          destination: logs
-      - store_test_results:
-          path: /logs

 workflows:
   version: 2
   build:
     jobs:
-      - sytestpy2:
-          filters:
-            branches:
-              only: /develop|master|release-.*/
-      - sytestpy2postgres:
-          filters:
-            branches:
-              only: /develop|master|release-.*/
-      - sytestpy3:
-          filters:
-            branches:
-              only: /develop|master|release-.*/
-      - sytestpy3postgres:
-          filters:
-            branches:
-              only: /develop|master|release-.*/
-      - sytestpy2merged:
-          filters:
-            branches:
-              ignore: /develop|master|release-.*/
-      - sytestpy2postgresmerged:
-          filters:
-            branches:
-              ignore: /develop|master|release-.*/
-      - sytestpy3merged:
-          filters:
-            branches:
-              ignore: /develop|master|release-.*/
-      - sytestpy3postgresmerged:
-          filters:
-            branches:
-              ignore: /develop|master|release-.*/
-
       - dockerhubuploadrelease:
           filters:
             tags:
diff --git a/.github/FUNDING.yml b/.github/FUNDING.yml
new file mode 100644
index 0000000000..1a57677a0e
--- /dev/null
+++ b/.github/FUNDING.yml
@@ -0,0 +1,4 @@
+# One username per supported platform and one custom link
+patreon: matrixdotorg
+liberapay: matrixdotorg
+custom: https://paypal.me/matrixdotorg
diff --git a/changelog.d/5252.feature b/changelog.d/5252.feature
new file mode 100644
index 0000000000..44115b0382
--- /dev/null
+++ b/changelog.d/5252.feature
@@ -0,0 +1 @@
+Add monthly active users to phonehome stats.
diff --git a/changelog.d/5325.bugfix b/changelog.d/5325.bugfix
new file mode 100644
index 0000000000..b9413388f5
--- /dev/null
+++ b/changelog.d/5325.bugfix
@@ -0,0 +1 @@
+Fix a bug where running synapse_port_db would cause the account validity feature to fail because it didn't set the type of the email_sent column to boolean.
diff --git a/changelog.d/5363.feature b/changelog.d/5363.feature
new file mode 100644
index 0000000000..803fe3fc37
--- /dev/null
+++ b/changelog.d/5363.feature
@@ -0,0 +1 @@
+Allow expired user to trigger renewal email sending manually.
diff --git a/changelog.d/5378.misc b/changelog.d/5378.misc
new file mode 100644
index 0000000000..365e49d634
--- /dev/null
+++ b/changelog.d/5378.misc
@@ -0,0 +1 @@
+Track deactivated accounts in the database.
diff --git a/changelog.d/5381.misc b/changelog.d/5381.misc
new file mode 100644
index 0000000000..bbf70a0445
--- /dev/null
+++ b/changelog.d/5381.misc
@@ -0,0 +1 @@
+Clean up code for sending federation EDUs.
diff --git a/changelog.d/5382.misc b/changelog.d/5382.misc
new file mode 100644
index 0000000000..060cbba2a9
--- /dev/null
+++ b/changelog.d/5382.misc
@@ -0,0 +1 @@
+Add a sponsor button to the repo.
diff --git a/changelog.d/5383.misc b/changelog.d/5383.misc
new file mode 100644
index 0000000000..9dd5d1df93
--- /dev/null
+++ b/changelog.d/5383.misc
@@ -0,0 +1 @@
+Don't log non-200 responses from federation queries as exceptions.
diff --git a/changelog.d/5384.feature b/changelog.d/5384.feature
new file mode 100644
index 0000000000..9497f521c8
--- /dev/null
+++ b/changelog.d/5384.feature
@@ -0,0 +1 @@
+Statistics on forward extremities per room are now exposed via Prometheus.
diff --git a/changelog.d/5386.misc b/changelog.d/5386.misc
new file mode 100644
index 0000000000..060cbba2a9
--- /dev/null
+++ b/changelog.d/5386.misc
@@ -0,0 +1 @@
+Add a sponsor button to the repo.
diff --git a/changelog.d/5387.bugfix b/changelog.d/5387.bugfix
new file mode 100644
index 0000000000..2c6c94efc4
--- /dev/null
+++ b/changelog.d/5387.bugfix
@@ -0,0 +1 @@
+Warn about disabling email-based password resets when a reset occurs, and remove warning when someone attempts a phone-based reset.
diff --git a/changelog.d/5388.bugfix b/changelog.d/5388.bugfix
new file mode 100644
index 0000000000..503e830915
--- /dev/null
+++ b/changelog.d/5388.bugfix
@@ -0,0 +1 @@
+Fix email notifications for unnamed rooms with multiple people.
diff --git a/changelog.d/5389.bugfix b/changelog.d/5389.bugfix
new file mode 100644
index 0000000000..dd648e26c8
--- /dev/null
+++ b/changelog.d/5389.bugfix
@@ -0,0 +1 @@
+Fix exceptions in federation reader worker caused by attempting to renew attestations, which should only happen on master worker.
diff --git a/changelog.d/5390.bugfix b/changelog.d/5390.bugfix
new file mode 100644
index 0000000000..e7b7483cf2
--- /dev/null
+++ b/changelog.d/5390.bugfix
@@ -0,0 +1 @@
+Fix handling of failures fetching remote content to not log failures as exceptions.
diff --git a/changelog.d/5394.bugfix b/changelog.d/5394.bugfix
new file mode 100644
index 0000000000..2ad9fbe82c
--- /dev/null
+++ b/changelog.d/5394.bugfix
@@ -0,0 +1 @@
+Fix a bug where deactivated users could receive renewal emails if the account validity feature is on.
diff --git a/changelog.d/5412.feature b/changelog.d/5412.feature
new file mode 100644
index 0000000000..ec1503860a
--- /dev/null
+++ b/changelog.d/5412.feature
@@ -0,0 +1 @@
+Add --no-daemonize option to run synapse in the foreground, per issue #4130. Contributed by Soham Gumaste.
\ No newline at end of file
diff --git a/changelog.d/5425.removal b/changelog.d/5425.removal
new file mode 100644
index 0000000000..30022ee63d
--- /dev/null
+++ b/changelog.d/5425.removal
@@ -0,0 +1 @@
+Python 2.7 is no longer a supported platform. Synapse now requires Python 3.5+ to run.
diff --git a/changelog.d/5440.feature b/changelog.d/5440.feature
new file mode 100644
index 0000000000..63d9b58734
--- /dev/null
+++ b/changelog.d/5440.feature
@@ -0,0 +1 @@
+Allow server admins to define implementations of extra rules for allowing or denying incoming events.
diff --git a/changelog.d/5447.misc b/changelog.d/5447.misc
new file mode 100644
index 0000000000..dd52068404
--- /dev/null
+++ b/changelog.d/5447.misc
@@ -0,0 +1 @@
+Update federation_client dev script to support `.well-known` and work with python3.
diff --git a/changelog.d/5458.feature b/changelog.d/5458.feature
new file mode 100644
index 0000000000..9497f521c8
--- /dev/null
+++ b/changelog.d/5458.feature
@@ -0,0 +1 @@
+Statistics on forward extremities per room are now exposed via Prometheus.
diff --git a/changelog.d/5459.misc b/changelog.d/5459.misc
new file mode 100644
index 0000000000..904e45f66b
--- /dev/null
+++ b/changelog.d/5459.misc
@@ -0,0 +1 @@
+SyTest has been moved to Buildkite.
diff --git a/changelog.d/5460.misc b/changelog.d/5460.misc
new file mode 100644
index 0000000000..badc8bb79a
--- /dev/null
+++ b/changelog.d/5460.misc
@@ -0,0 +1 @@
+Demo script now uses python3.
diff --git a/changelog.d/5461.feature b/changelog.d/5461.feature
new file mode 100644
index 0000000000..9497f521c8
--- /dev/null
+++ b/changelog.d/5461.feature
@@ -0,0 +1 @@
+Statistics on forward extremities per room are now exposed via Prometheus.
diff --git a/changelog.d/5464.bugfix b/changelog.d/5464.bugfix
new file mode 100644
index 0000000000..8278d1bce9
--- /dev/null
+++ b/changelog.d/5464.bugfix
@@ -0,0 +1 @@
+Fix missing invite state after exchanging 3PID invites over federation.
diff --git a/changelog.d/5465.misc b/changelog.d/5465.misc
new file mode 100644
index 0000000000..af5f0f8f45
--- /dev/null
+++ b/changelog.d/5465.misc
@@ -0,0 +1,2 @@
+Track deactivated accounts in the database.
+
diff --git a/demo/start.sh b/demo/start.sh
index c4a1328a6f..5c3a8fe61f 100755
--- a/demo/start.sh
+++ b/demo/start.sh
@@ -21,7 +21,7 @@ for port in 8080 8081 8082; do
     pushd demo/$port

     #rm $DIR/etc/$port.config
-    python -m synapse.app.homeserver \
+    python3 -m synapse.app.homeserver \
         --generate-config \
         -H "localhost:$https_port" \
         --config-path "$DIR/etc/$port.config" \
@@ -55,7 +55,7 @@ for port in 8080 8081 8082; do
         echo "report_stats: false" >> $DIR/etc/$port.config
     fi

-    python -m synapse.app.homeserver \
+    python3 -m synapse.app.homeserver \
         --config-path "$DIR/etc/$port.config" \
         -D \
         -vv \
diff --git a/docker/README.md b/docker/README.md
index df5d0151e2..5a596eecb9 100644
--- a/docker/README.md
+++ b/docker/README.md
@@ -14,7 +14,7 @@ This image is designed to run either with an automatically generated
 configuration file or with a custom configuration that requires manual editing.

 An easy way to make use of this image is via docker-compose. See the
-[contrib/docker](../contrib/docker) section of the synapse project for
+[contrib/docker](https://github.com/matrix-org/synapse/tree/master/contrib/docker) section of the synapse project for
 examples.

 ### Without Compose (harder)
diff --git a/docs/sample_config.yaml b/docs/sample_config.yaml
index 4d7e6f3eb5..bd80d97a93 100644
--- a/docs/sample_config.yaml
+++ b/docs/sample_config.yaml
@@ -1351,3 +1351,16 @@ password_config:
 #    alias: "*"
 #    room_id: "*"
 #    action: allow
+
+
+# Server admins can define a Python module that implements extra rules for
+# allowing or denying incoming events. In order to work, this module needs to
+# override the methods defined in synapse/events/third_party_rules.py.
+#
+# This feature is designed to be used in closed federations only, where each
+# participating server enforces the same rules.
+#
+#third_party_event_rules:
+#  module: "my_custom_project.SuperRulesSet"
+#  config:
+#    example_option: 'things'
diff --git a/scripts-dev/federation_client.py b/scripts-dev/federation_client.py
index e0287c8c6c..41e7b24418 100755
--- a/scripts-dev/federation_client.py
+++ b/scripts-dev/federation_client.py
@@ -21,7 +21,8 @@ import argparse
 import base64
 import json
 import sys
-from urlparse import urlparse, urlunparse
+
+from six.moves.urllib import parse as urlparse

 import nacl.signing
 import requests
@@ -145,7 +146,7 @@ def request_json(method, origin_name, origin_key, destination, path, content):

     for key, sig in signed_json["signatures"][origin_name].items():
         header = "X-Matrix origin=%s,key=\"%s\",sig=\"%s\"" % (origin_name, key, sig)
-        authorization_headers.append(bytes(header))
+        authorization_headers.append(header.encode("ascii"))
         print("Authorization: %s" % header, file=sys.stderr)

     dest = "matrix://%s%s" % (destination, path)
@@ -250,7 +251,7 @@ def read_args_from_config(args):

 class MatrixConnectionAdapter(HTTPAdapter):
     @staticmethod
-    def lookup(s):
+    def lookup(s, skip_well_known=False):
         if s[-1] == ']':
             # ipv6 literal (with no port)
             return s, 8448
@@ -263,19 +264,51 @@ class MatrixConnectionAdapter(HTTPAdapter):
                 raise ValueError("Invalid host:port '%s'" % s)
             return out[0], port

+        # try a .well-known lookup
+        if not skip_well_known:
+            well_known = MatrixConnectionAdapter.get_well_known(s)
+            if well_known:
+                return MatrixConnectionAdapter.lookup(
+                    well_known, skip_well_known=True
+                )
+
         try:
             srv = srvlookup.lookup("matrix", "tcp", s)[0]
             return srv.host, srv.port
         except Exception:
             return s, 8448

+    @staticmethod
+    def get_well_known(server_name):
+        uri = "https://%s/.well-known/matrix/server" % (server_name, )
+        print("fetching %s" % (uri, ), file=sys.stderr)
+
+        try:
+            resp = requests.get(uri)
+            if resp.status_code != 200:
+                print("%s gave %i" % (uri, resp.status_code), file=sys.stderr)
+                return None
+
+            parsed_well_known = resp.json()
+            if not isinstance(parsed_well_known, dict):
+                raise Exception("not a dict")
+            if "m.server" not in parsed_well_known:
+                raise Exception("Missing key 'm.server'")
+            new_name = parsed_well_known['m.server']
+            print("well-known lookup gave %s" % (new_name, ), file=sys.stderr)
+            return new_name
+
+        except Exception as e:
+            print("Invalid response from %s: %s" % (uri, e, ), file=sys.stderr)
+            return None
+
     def get_connection(self, url, proxies=None):
-        parsed = urlparse(url)
+        parsed = urlparse.urlparse(url)

         (host, port) = self.lookup(parsed.netloc)
         netloc = "%s:%d" % (host, port)
         print("Connecting to %s" % (netloc,), file=sys.stderr)
-        url = urlunparse(
+        url = urlparse.urlunparse(
             ("https", netloc, parsed.path, parsed.params, parsed.query, parsed.fragment)
         )
         return super(MatrixConnectionAdapter, self).get_connection(url, proxies)
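
For reference, get_well_known above expects the document served at https://<server_name>/.well-known/matrix/server to be a JSON dict with an "m.server" key; anything else is treated as invalid. An illustrative response (hostname made up):

    {"m.server": "matrix.example.com:443"}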
diff --git a/scripts/generate_signing_key.py b/scripts/generate_signing_key.py
index ba3ba97395..36e9140b50 100755
--- a/scripts/generate_signing_key.py
+++ b/scripts/generate_signing_key.py
@@ -16,7 +16,7 @@
 import argparse
 import sys

-from signedjson.key import write_signing_keys, generate_signing_key
+from signedjson.key import generate_signing_key, write_signing_keys

 from synapse.util.stringutils import random_string

diff --git a/scripts/synapse_port_db b/scripts/synapse_port_db
index 41be9c9220..b6ba19c776 100755
--- a/scripts/synapse_port_db
+++ b/scripts/synapse_port_db
@@ -54,6 +54,7 @@ BOOLEAN_COLUMNS = {
     "group_roles": ["is_public"],
     "local_group_membership": ["is_publicised", "is_admin"],
     "e2e_room_keys": ["is_verified"],
+    "account_validity": ["email_sent"],
 }

diff --git a/setup.py b/setup.py
index 55663e9cac..3492cdc5a0 100755
--- a/setup.py
+++ b/setup.py
@@ -102,6 +102,16 @@ setup(
     include_package_data=True,
     zip_safe=False,
     long_description=long_description,
+    python_requires='~=3.5',
+    classifiers=[
+        'Development Status :: 5 - Production/Stable',
+        'Topic :: Communications :: Chat',
+        'License :: OSI Approved :: Apache Software License',
+        'Programming Language :: Python :: 3 :: Only',
+        'Programming Language :: Python :: 3.5',
+        'Programming Language :: Python :: 3.6',
+        'Programming Language :: Python :: 3.7',
+    ],
     scripts=["synctl"] + glob.glob("scripts/*"),
     cmdclass={'test': TestCommand},
 )
diff --git a/synapse/__init__.py b/synapse/__init__.py
index 5bc24863d9..0c01546789 100644
--- a/synapse/__init__.py
+++ b/synapse/__init__.py
@@ -17,6 +17,13 @@
 """ This is a reference implementation of a Matrix home server.
 """

+import sys
+
+# Check that we're not running on an unsupported Python version.
+if sys.version_info < (3, 5):
+    print("Synapse requires Python 3.5 or above.")
+    sys.exit(1)
+
 try:
     from twisted.internet import protocol
     from twisted.internet.protocol import Factory
diff --git a/synapse/api/auth.py b/synapse/api/auth.py
index 0c6c93a87b..79e2808dc5 100644
--- a/synapse/api/auth.py
+++ b/synapse/api/auth.py
@@ -184,11 +184,22 @@ class Auth(object):
         return event_auth.get_public_keys(invite_event)

     @defer.inlineCallbacks
-    def get_user_by_req(self, request, allow_guest=False, rights="access"):
+    def get_user_by_req(
+        self,
+        request,
+        allow_guest=False,
+        rights="access",
+        allow_expired=False,
+    ):
         """ Get a registered user's ID.

         Args:
             request - An HTTP request with an access_token query parameter.
+            allow_expired - Whether to allow the request through even if the account is
+                expired. If true, Synapse will still require an access token to be
+                provided but won't check if the account it belongs to has expired. This
+                works thanks to /login delivering access tokens regardless of accounts'
+                expiration.
         Returns:
             defer.Deferred: resolves to a ``synapse.types.Requester`` object
         Raises:
@@ -229,7 +240,7 @@ class Auth(object):
             is_guest = user_info["is_guest"]

             # Deny the request if the user account has expired.
-            if self._account_validity.enabled:
+            if self._account_validity.enabled and not allow_expired:
                 user_id = user.to_string()
                 expiration_ts = yield self.store.get_expiration_ts_for_user(user_id)
                 if expiration_ts is not None and self.clock.time_msec() >= expiration_ts:
diff --git a/synapse/app/homeserver.py b/synapse/app/homeserver.py
index 1045d28949..b27b12e73d 100755
--- a/synapse/app/homeserver.py
+++ b/synapse/app/homeserver.py
@@ -540,6 +540,7 @@ def run(hs):
             stats["total_room_count"] = room_count

             stats["daily_active_users"] = yield hs.get_datastore().count_daily_users()
+            stats["monthly_active_users"] = yield hs.get_datastore().count_monthly_users()
             stats["daily_active_rooms"] = yield hs.get_datastore().count_daily_active_rooms()
             stats["daily_messages"] = yield hs.get_datastore().count_daily_messages()

diff --git a/synapse/config/emailconfig.py b/synapse/config/emailconfig.py
index ae04252906..86018dfcce 100644
--- a/synapse/config/emailconfig.py
+++ b/synapse/config/emailconfig.py
@@ -19,15 +19,12 @@ from __future__ import print_function

 # This file can't be called email.py because if it is, we cannot:
 import email.utils
-import logging
 import os

 import pkg_resources

 from ._base import Config, ConfigError

-logger = logging.getLogger(__name__)
-

 class EmailConfig(Config):
     def read_config(self, config):
@@ -85,10 +82,12 @@ class EmailConfig(Config):
         self.email_password_reset_behaviour = (
             "remote" if email_trust_identity_server_for_password_resets else "local"
         )
+        self.password_resets_were_disabled_due_to_email_config = False
         if self.email_password_reset_behaviour == "local" and email_config == {}:
-            logger.warn(
-                "User password resets have been disabled due to lack of email config"
-            )
+            # We cannot warn the user this has happened here
+            # Instead do so when a user attempts to reset their password
+            self.password_resets_were_disabled_due_to_email_config = True
+
             self.email_password_reset_behaviour = "off"

         # Get lifetime of a validation token in milliseconds
diff --git a/synapse/config/homeserver.py b/synapse/config/homeserver.py
index 5c4fc8ff21..acadef4fd3 100644
--- a/synapse/config/homeserver.py
+++ b/synapse/config/homeserver.py
@@ -38,6 +38,7 @@ from .server import ServerConfig
 from .server_notices_config import ServerNoticesConfig
 from .spam_checker import SpamCheckerConfig
 from .stats import StatsConfig
+from .third_party_event_rules import ThirdPartyRulesConfig
 from .tls import TlsConfig
 from .user_directory import UserDirectoryConfig
 from .voip import VoipConfig
@@ -73,5 +74,6 @@ class HomeServerConfig(
     StatsConfig,
     ServerNoticesConfig,
     RoomDirectoryConfig,
+    ThirdPartyRulesConfig,
 ):
     pass
diff --git a/synapse/config/third_party_event_rules.py b/synapse/config/third_party_event_rules.py
new file mode 100644
index 0000000000..a89dd5f98a
--- /dev/null
+++ b/synapse/config/third_party_event_rules.py
@@ -0,0 +1,42 @@
+# -*- coding: utf-8 -*-
+# Copyright 2019 The Matrix.org Foundation C.I.C.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from synapse.util.module_loader import load_module
+
+from ._base import Config
+
+
+class ThirdPartyRulesConfig(Config):
+    def read_config(self, config):
+        self.third_party_event_rules = None
+
+        provider = config.get("third_party_event_rules", None)
+        if provider is not None:
+            self.third_party_event_rules = load_module(provider)
+
+    def default_config(self, **kwargs):
+        return """\
+        # Server admins can define a Python module that implements extra rules for
+        # allowing or denying incoming events. In order to work, this module needs to
+        # override the methods defined in synapse/events/third_party_rules.py.
+        #
+        # This feature is designed to be used in closed federations only, where each
+        # participating server enforces the same rules.
+        #
+        #third_party_event_rules:
+        #  module: "my_custom_project.SuperRulesSet"
+        #  config:
+        #    example_option: 'things'
+        """
diff --git a/synapse/events/third_party_rules.py b/synapse/events/third_party_rules.py
new file mode 100644
index 0000000000..9f98d51523
--- /dev/null
+++ b/synapse/events/third_party_rules.py
@@ -0,0 +1,62 @@
+# -*- coding: utf-8 -*-
+# Copyright 2019 The Matrix.org Foundation C.I.C.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from twisted.internet import defer
+
+
+class ThirdPartyEventRules(object):
+    """Allows server admins to provide a Python module implementing an extra set of rules
+    to apply when processing events.
+
+    This is designed to help admins of closed federations with enforcing custom
+    behaviours.
+    """
+
+    def __init__(self, hs):
+        self.third_party_rules = None
+
+        self.store = hs.get_datastore()
+
+        module = None
+        config = None
+        if hs.config.third_party_event_rules:
+            module, config = hs.config.third_party_event_rules
+
+        if module is not None:
+            self.third_party_rules = module(config=config)
+
+    @defer.inlineCallbacks
+    def check_event_allowed(self, event, context):
+        """Check if a provided event should be allowed in the given context.
+
+        Args:
+            event (synapse.events.EventBase): The event to be checked.
+            context (synapse.events.snapshot.EventContext): The context of the event.
+
+        Returns:
+            defer.Deferred(bool), True if the event should be allowed, False if not.
+        """
+        if self.third_party_rules is None:
+            defer.returnValue(True)
+
+        prev_state_ids = yield context.get_prev_state_ids(self.store)
+
+        # Retrieve the state events from the database.
+        state_events = {}
+        for key, event_id in prev_state_ids.items():
+            state_events[key] = yield self.store.get_event(event_id, allow_none=True)
+
+        ret = yield self.third_party_rules.check_event_allowed(event, state_events)
+        defer.returnValue(ret)
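
The module named by the third_party_event_rules option is instantiated as module(config=config) and only needs to expose the check_event_allowed method called above, taking the event and a dict of the room's current state events and returning a bool (or a Deferred firing one). A minimal sketch of such a module -- the class name and config key below are illustrative, not part of this commit:

    class ExampleRulesSet(object):
        def __init__(self, config):
            # 'config' is the dict under the "config" key in homeserver.yaml
            self.denied_types = config.get("denied_event_types", [])

        def check_event_allowed(self, event, state_events):
            # Allow everything except event types named in the module config.
            return event.type not in self.denied_types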
diff --git a/synapse/federation/sender/per_destination_queue.py b/synapse/federation/sender/per_destination_queue.py
index 564c57203d..22a2735405 100644
--- a/synapse/federation/sender/per_destination_queue.py
+++ b/synapse/federation/sender/per_destination_queue.py
@@ -189,11 +189,21 @@ class PerDestinationQueue(object):
             pending_pdus = []
             while True:
-                device_message_edus, device_stream_id, dev_list_id = (
-                    # We have to keep 2 free slots for presence and rr_edus
-                    yield self._get_new_device_messages(MAX_EDUS_PER_TRANSACTION - 2)
+                # We have to keep 2 free slots for presence and rr_edus
+                limit = MAX_EDUS_PER_TRANSACTION - 2
+
+                device_update_edus, dev_list_id = (
+                    yield self._get_device_update_edus(limit)
+                )
+
+                limit -= len(device_update_edus)
+
+                to_device_edus, device_stream_id = (
+                    yield self._get_to_device_message_edus(limit)
                 )

+                pending_edus = device_update_edus + to_device_edus
+
                 # BEGIN CRITICAL SECTION
                 #
                 # In order to avoid a race condition, we need to make sure that
@@ -208,10 +218,6 @@ class PerDestinationQueue(object):
                 # We can only include at most 50 PDUs per transactions
                 pending_pdus, self._pending_pdus = pending_pdus[:50], pending_pdus[50:]

-                pending_edus = []
-
-                # We can only include at most 100 EDUs per transactions
-                # rr_edus and pending_presence take at most one slot each
                 pending_edus.extend(self._get_rr_edus(force_flush=False))
                 pending_presence = self._pending_presence
                 self._pending_presence = {}
@@ -232,7 +238,6 @@ class PerDestinationQueue(object):
                     )
                 )

-                pending_edus.extend(device_message_edus)
                 pending_edus.extend(
                     self._pop_pending_edus(MAX_EDUS_PER_TRANSACTION - len(pending_edus))
                 )
@@ -272,10 +277,13 @@ class PerDestinationQueue(object):
                         sent_edus_by_type.labels(edu.edu_type).inc()

                     # Remove the acknowledged device messages from the database
                     # Only bother if we actually sent some device messages
-                    if device_message_edus:
+                    if to_device_edus:
                         yield self._store.delete_device_msgs_for_remote(
                             self._destination, device_stream_id
                         )
+
+                    # also mark the device updates as sent
+                    if device_update_edus:
                         logger.info(
                             "Marking as sent %r %r", self._destination, dev_list_id
                         )
@@ -347,7 +355,7 @@ class PerDestinationQueue(object):
         return pending_edus

     @defer.inlineCallbacks
-    def _get_new_device_messages(self, limit):
+    def _get_device_update_edus(self, limit):
         last_device_list = self._last_device_list_stream_id

         # Retrieve list of new device updates to send to the destination
@@ -366,15 +374,19 @@ class PerDestinationQueue(object):

         assert len(edus) <= limit, "get_devices_by_remote returned too many EDUs"

+        defer.returnValue((edus, now_stream_id))
+
+    @defer.inlineCallbacks
+    def _get_to_device_message_edus(self, limit):
         last_device_stream_id = self._last_device_stream_id
         to_device_stream_id = self._store.get_to_device_stream_token()
         contents, stream_id = yield self._store.get_new_device_msgs_for_remote(
             self._destination,
             last_device_stream_id,
             to_device_stream_id,
-            limit - len(edus),
+            limit,
         )
-        edus.extend(
+        edus = [
             Edu(
                 origin=self._server_name,
                 destination=self._destination,
@@ -382,6 +394,6 @@ class PerDestinationQueue(object):
                 content=content,
             )
             for content in contents
-        )
+        ]

-        defer.returnValue((edus, stream_id, now_stream_id))
+        defer.returnValue((edus, stream_id))
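
The refactored budgeting works as follows: with MAX_EDUS_PER_TRANSACTION at 100 (per the removed comment), two slots stay reserved for read receipts and presence, device list updates claim up to the remaining 98, and to-device messages receive whatever those leave over. A worked example with invented numbers:

    # Illustrative only, assuming MAX_EDUS_PER_TRANSACTION == 100:
    limit = 100 - 2   # 98 slots after reserving one each for rr_edus and presence
    # if 40 device list update EDUs are pending:
    limit -= 40       # 58 slots remain for to-device message EDUs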
diff --git a/synapse/groups/attestations.py b/synapse/groups/attestations.py
index e5dda1975f..3ba74001d8 100644
--- a/synapse/groups/attestations.py
+++ b/synapse/groups/attestations.py
@@ -42,7 +42,7 @@ from signedjson.sign import sign_json

 from twisted.internet import defer

-from synapse.api.errors import RequestSendFailed, SynapseError
+from synapse.api.errors import HttpResponseException, RequestSendFailed, SynapseError
 from synapse.metrics.background_process_metrics import run_as_background_process
 from synapse.types import get_domain_from_id
 from synapse.util.logcontext import run_in_background
@@ -132,9 +132,10 @@ class GroupAttestionRenewer(object):
         self.is_mine_id = hs.is_mine_id
         self.attestations = hs.get_groups_attestation_signing()

-        self._renew_attestations_loop = self.clock.looping_call(
-            self._start_renew_attestations, 30 * 60 * 1000,
-        )
+        if not hs.config.worker_app:
+            self._renew_attestations_loop = self.clock.looping_call(
+                self._start_renew_attestations, 30 * 60 * 1000,
+            )

     @defer.inlineCallbacks
     def on_renew_attestation(self, group_id, user_id, content):
@@ -194,7 +195,7 @@ class GroupAttestionRenewer(object):
                 yield self.store.update_attestation_renewal(
                     group_id, user_id, attestation
                 )
-            except RequestSendFailed as e:
+            except (RequestSendFailed, HttpResponseException) as e:
                 logger.warning(
                     "Failed to renew attestation of %r in %r: %s",
                     user_id, group_id, e,
diff --git a/synapse/handlers/account_validity.py b/synapse/handlers/account_validity.py
index 261446517d..5e0b92eb1c 100644
--- a/synapse/handlers/account_validity.py
+++ b/synapse/handlers/account_validity.py
@@ -110,6 +110,9 @@ class AccountValidityHandler(object):
             # Stop right here if the user doesn't have at least one email address.
             # In this case, they will have to ask their server admin to renew their
             # account manually.
+            # We don't need to do a specific check to make sure the account isn't
+            # deactivated, as a deactivated account isn't supposed to have any
+            # email address attached to it.
             if not addresses:
                 return

diff --git a/synapse/handlers/deactivate_account.py b/synapse/handlers/deactivate_account.py
index 6a91f7698e..7378b56c1d 100644
--- a/synapse/handlers/deactivate_account.py
+++ b/synapse/handlers/deactivate_account.py
@@ -1,5 +1,6 @@
 # -*- coding: utf-8 -*-
 # Copyright 2017, 2018 New Vector Ltd
+# Copyright 2019 The Matrix.org Foundation C.I.C.
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -42,6 +43,8 @@ class DeactivateAccountHandler(BaseHandler):
         # it left off (if it has work left to do).
         hs.get_reactor().callWhenRunning(self._start_user_parting)

+        self._account_validity_enabled = hs.config.account_validity.enabled
+
     @defer.inlineCallbacks
     def deactivate_account(self, user_id, erase_data, id_server=None):
         """Deactivate a user's account
@@ -114,6 +117,13 @@ class DeactivateAccountHandler(BaseHandler):
         # parts users from rooms (if it isn't already running)
         self._start_user_parting()

+        # Remove all information on the user from the account_validity table.
+        if self._account_validity_enabled:
+            yield self.store.delete_account_validity_for_user(user_id)
+
+        # Mark the user as deactivated.
+        yield self.store.set_user_deactivated_status(user_id, True)
+
         defer.returnValue(identity_server_supports_unbinding)

     def _start_user_parting(self):
diff --git a/synapse/handlers/federation.py b/synapse/handlers/federation.py
index ac5ca79143..93e064cda3 100644
--- a/synapse/handlers/federation.py
+++ b/synapse/handlers/federation.py
@@ -1,6 +1,7 @@
 # -*- coding: utf-8 -*-
 # Copyright 2014-2016 OpenMarket Ltd
-# Copyright 2018 New Vector Ltd
+# Copyright 2017-2018 New Vector Ltd
+# Copyright 2019 The Matrix.org Foundation C.I.C.
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -33,6 +34,7 @@ from synapse.api.constants import EventTypes, Membership, RejectedReason
 from synapse.api.errors import (
     AuthError,
     CodeMessageException,
+    Codes,
     FederationDeniedError,
     FederationError,
     RequestSendFailed,
@@ -127,6 +129,8 @@ class FederationHandler(BaseHandler):
         self.room_queues = {}
         self._room_pdu_linearizer = Linearizer("fed_room_pdu")

+        self.third_party_event_rules = hs.get_third_party_event_rules()
+
     @defer.inlineCallbacks
     def on_receive_pdu(
             self, origin, pdu, sent_to_us_directly=False,
@@ -1258,6 +1262,15 @@ class FederationHandler(BaseHandler):
             logger.warn("Failed to create join %r because %s", event, e)
             raise e

+        event_allowed = yield self.third_party_event_rules.check_event_allowed(
+            event, context,
+        )
+        if not event_allowed:
+            logger.info("Creation of join %s forbidden by third-party rules", event)
+            raise SynapseError(
+                403, "This event is not allowed in this context", Codes.FORBIDDEN,
+            )
+
         # The remote hasn't signed it yet, obviously. We'll do the full checks
         # when we get the event back in `on_send_join_request`
         yield self.auth.check_from_context(
@@ -1300,6 +1313,15 @@ class FederationHandler(BaseHandler):
             origin, event
         )

+        event_allowed = yield self.third_party_event_rules.check_event_allowed(
+            event, context,
+        )
+        if not event_allowed:
+            logger.info("Sending of join %s forbidden by third-party rules", event)
+            raise SynapseError(
+                403, "This event is not allowed in this context", Codes.FORBIDDEN,
+            )
+
         logger.debug(
             "on_send_join_request: After _handle_new_event: %s, sigs: %s",
             event.event_id,
@@ -1458,6 +1480,15 @@ class FederationHandler(BaseHandler):
             builder=builder,
         )

+        event_allowed = yield self.third_party_event_rules.check_event_allowed(
+            event, context,
+        )
+        if not event_allowed:
+            logger.warning("Creation of leave %s forbidden by third-party rules", event)
+            raise SynapseError(
+                403, "This event is not allowed in this context", Codes.FORBIDDEN,
+            )
+
         try:
             # The remote hasn't signed it yet, obviously. We'll do the full checks
             # when we get the event back in `on_send_leave_request`
@@ -1484,10 +1515,19 @@ class FederationHandler(BaseHandler):

         event.internal_metadata.outlier = False

-        yield self._handle_new_event(
+        context = yield self._handle_new_event(
             origin, event
         )

+        event_allowed = yield self.third_party_event_rules.check_event_allowed(
+            event, context,
+        )
+        if not event_allowed:
+            logger.info("Sending of leave %s forbidden by third-party rules", event)
+            raise SynapseError(
+                403, "This event is not allowed in this context", Codes.FORBIDDEN,
+            )
+
         logger.debug(
             "on_send_leave_request: After _handle_new_event: %s, sigs: %s",
             event.event_id,
@@ -2550,6 +2590,18 @@ class FederationHandler(BaseHandler):
             builder=builder
         )

+        event_allowed = yield self.third_party_event_rules.check_event_allowed(
+            event, context,
+        )
+        if not event_allowed:
+            logger.info(
+                "Creation of threepid invite %s forbidden by third-party rules",
+                event,
+            )
+            raise SynapseError(
+                403, "This event is not allowed in this context", Codes.FORBIDDEN,
+            )
+
         event, context = yield self.add_display_name_to_third_party_invite(
             room_version, event_dict, event, context
         )
@@ -2598,6 +2650,18 @@ class FederationHandler(BaseHandler):
             builder=builder,
         )

+        event_allowed = yield self.third_party_event_rules.check_event_allowed(
+            event, context,
+        )
+        if not event_allowed:
+            logger.warning(
+                "Exchange of threepid invite %s forbidden by third-party rules",
+                event,
+            )
+            raise SynapseError(
+                403, "This event is not allowed in this context", Codes.FORBIDDEN,
+            )
+
         event, context = yield self.add_display_name_to_third_party_invite(
             room_version, event_dict, event, context
         )
@@ -2613,12 +2677,6 @@ class FederationHandler(BaseHandler):
         # though the sender isn't a local user.
         event.internal_metadata.send_on_behalf_of = get_domain_from_id(event.sender)

-        # XXX we send the invite here, but send_membership_event also sends it,
-        # so we end up making two requests. I think this is redundant.
-        returned_invite = yield self.send_invite(origin, event)
-        # TODO: Make sure the signatures actually are correct.
-        event.signatures.update(returned_invite.signatures)
-
         member_handler = self.hs.get_room_member_handler()
         yield member_handler.send_membership_event(None, event, context)
diff --git a/synapse/handlers/groups_local.py b/synapse/handlers/groups_local.py
index 02c508acec..f60ace02e8 100644
--- a/synapse/handlers/groups_local.py
+++ b/synapse/handlers/groups_local.py
@@ -49,9 +49,7 @@ def _create_rerouter(func_name):
             def http_response_errback(failure):
                 failure.trap(HttpResponseException)
                 e = failure.value
-                if e.code == 403:
-                    raise e.to_synapse_error()
-                return failure
+                raise e.to_synapse_error()

             def request_failed_errback(failure):
                 failure.trap(RequestSendFailed)
diff --git a/synapse/handlers/message.py b/synapse/handlers/message.py
index f80386b5c9..0ba66c3b4d 100644
--- a/synapse/handlers/message.py
+++ b/synapse/handlers/message.py
@@ -1,6 +1,7 @@
 # -*- coding: utf-8 -*-
-# Copyright 2014 - 2016 OpenMarket Ltd
-# Copyright 2017 - 2018 New Vector Ltd
+# Copyright 2014-2016 OpenMarket Ltd
+# Copyright 2017-2018 New Vector Ltd
+# Copyright 2019 The Matrix.org Foundation C.I.C.
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -248,6 +249,7 @@ class EventCreationHandler(object):
         self.action_generator = hs.get_action_generator()

         self.spam_checker = hs.get_spam_checker()
+        self.third_party_event_rules = hs.get_third_party_event_rules()

         self._block_events_without_consent_error = (
             self.config.block_events_without_consent_error
@@ -658,6 +660,14 @@ class EventCreationHandler(object):
         else:
             room_version = yield self.store.get_room_version(event.room_id)

+        event_allowed = yield self.third_party_event_rules.check_event_allowed(
+            event, context,
+        )
+        if not event_allowed:
+            raise SynapseError(
+                403, "This event is not allowed in this context", Codes.FORBIDDEN,
+            )
+
         try:
             yield self.auth.check_from_context(room_version, event, context)
         except AuthError as err:
diff --git a/synapse/handlers/profile.py b/synapse/handlers/profile.py
index a5fc6c5dbf..3e04233394 100644
--- a/synapse/handlers/profile.py
+++ b/synapse/handlers/profile.py
@@ -15,12 +15,15 @@

 import logging

+from six import raise_from
+
 from twisted.internet import defer

 from synapse.api.errors import (
     AuthError,
-    CodeMessageException,
     Codes,
+    HttpResponseException,
+    RequestSendFailed,
     StoreError,
     SynapseError,
 )
@@ -85,10 +88,10 @@ class BaseProfileHandler(BaseHandler):
                     ignore_backoff=True,
                 )
                 defer.returnValue(result)
-            except CodeMessageException as e:
-                if e.code != 404:
-                    logger.exception("Failed to get displayname")
-                raise
+            except RequestSendFailed as e:
+                raise_from(SynapseError(502, "Failed to fetch profile"), e)
+            except HttpResponseException as e:
+                raise e.to_synapse_error()

     @defer.inlineCallbacks
     def get_profile_from_cache(self, user_id):
@@ -142,10 +145,10 @@ class BaseProfileHandler(BaseHandler):
                     },
                     ignore_backoff=True,
                 )
-            except CodeMessageException as e:
-                if e.code != 404:
-                    logger.exception("Failed to get displayname")
-                raise
+            except RequestSendFailed as e:
+                raise_from(SynapseError(502, "Failed to fetch profile"), e)
+            except HttpResponseException as e:
+                raise e.to_synapse_error()

             defer.returnValue(result["displayname"])

@@ -208,10 +211,10 @@ class BaseProfileHandler(BaseHandler):
                     },
                     ignore_backoff=True,
                 )
-            except CodeMessageException as e:
-                if e.code != 404:
-                    logger.exception("Failed to get avatar_url")
-                raise
+            except RequestSendFailed as e:
+                raise_from(SynapseError(502, "Failed to fetch profile"), e)
+            except HttpResponseException as e:
+                raise e.to_synapse_error()

             defer.returnValue(result["avatar_url"])

diff --git a/synapse/http/client.py b/synapse/http/client.py
index 77fe68818b..5c073fff07 100644
--- a/synapse/http/client.py
+++ b/synapse/http/client.py
@@ -17,7 +17,7 @@
 import logging
 from io import BytesIO

-from six import text_type
+from six import raise_from, text_type
 from six.moves import urllib

 import treq
@@ -542,10 +542,15 @@ class SimpleHttpClient(object):
             length = yield make_deferred_yieldable(
                 _readBodyToFile(response, output_stream, max_size)
             )
+        except SynapseError:
+            # This can happen e.g. because the body is too large.
+            raise
         except Exception as e:
-            logger.exception("Failed to download body")
-            raise SynapseError(
-                502, ("Failed to download remote body: %s" % e), Codes.UNKNOWN
+            raise_from(
+                SynapseError(
+                    502, ("Failed to download remote body: %s" % e),
+                ),
+                e
             )

         defer.returnValue(
diff --git a/synapse/metrics/__init__.py b/synapse/metrics/__init__.py
index ef48984fdd..8aee14a8a8 100644 --- a/synapse/metrics/__init__.py +++ b/synapse/metrics/__init__.py
@@ -25,7 +25,7 @@ import six import attr from prometheus_client import Counter, Gauge, Histogram -from prometheus_client.core import REGISTRY, GaugeMetricFamily +from prometheus_client.core import REGISTRY, GaugeMetricFamily, HistogramMetricFamily from twisted.internet import reactor @@ -40,7 +40,6 @@ HAVE_PROC_SELF_STAT = os.path.exists("/proc/self/stat") class RegistryProxy(object): - @staticmethod def collect(): for metric in REGISTRY.collect(): @@ -63,10 +62,7 @@ class LaterGauge(object): try: calls = self.caller() except Exception: - logger.exception( - "Exception running callback for LaterGauge(%s)", - self.name, - ) + logger.exception("Exception running callback for LaterGauge(%s)", self.name) yield g return @@ -116,9 +112,7 @@ class InFlightGauge(object): # Create a class which have the sub_metrics values as attributes, which # default to 0 on initialization. Used to pass to registered callbacks. self._metrics_class = attr.make_class( - "_MetricsEntry", - attrs={x: attr.ib(0) for x in sub_metrics}, - slots=True, + "_MetricsEntry", attrs={x: attr.ib(0) for x in sub_metrics}, slots=True ) # Counts number of in flight blocks for a given set of label values @@ -157,7 +151,9 @@ class InFlightGauge(object): Note: may be called by a separate thread. """ - in_flight = GaugeMetricFamily(self.name + "_total", self.desc, labels=self.labels) + in_flight = GaugeMetricFamily( + self.name + "_total", self.desc, labels=self.labels + ) metrics_by_key = {} @@ -179,7 +175,9 @@ class InFlightGauge(object): yield in_flight for name in self.sub_metrics: - gauge = GaugeMetricFamily("_".join([self.name, name]), "", labels=self.labels) + gauge = GaugeMetricFamily( + "_".join([self.name, name]), "", labels=self.labels + ) for key, metrics in six.iteritems(metrics_by_key): gauge.add_metric(key, getattr(metrics, name)) yield gauge @@ -193,12 +191,74 @@ class InFlightGauge(object): all_gauges[self.name] = self +@attr.s(hash=True) +class BucketCollector(object): + """ + Like a Histogram, but allows buckets to be point-in-time instead of + incrementally added to. + + Args: + name (str): Base name of metric to be exported to Prometheus. + data_collector (callable -> dict): A synchronous callable that + returns a dict mapping bucket to number of items in the + bucket. If these buckets are not the same as the buckets + given to this class, they will be remapped into them. + buckets (list[float]): List of floats/ints of the buckets to + give to Prometheus. +Inf is ignored, if given. + + """ + + name = attr.ib() + data_collector = attr.ib() + buckets = attr.ib() + + def collect(self): + + # Fetch the data -- this must be synchronous! 
+ data = self.data_collector() + + buckets = {} + + res = [] + for x in data.keys(): + for i, bound in enumerate(self.buckets): + if x <= bound: + buckets[bound] = buckets.get(bound, 0) + data[x] + + for i in self.buckets: + res.append([str(i), buckets.get(i, 0)]) + + res.append(["+Inf", sum(data.values())]) + + metric = HistogramMetricFamily( + self.name, + "", + buckets=res, + sum_value=sum([x * y for x, y in data.items()]), + ) + yield metric + + def __attrs_post_init__(self): + self.buckets = [float(x) for x in self.buckets if x != "+Inf"] + if self.buckets != sorted(self.buckets): + raise ValueError("Buckets not sorted") + + self.buckets = tuple(self.buckets) + + if self.name in all_gauges.keys(): + logger.warning("%s already registered, reregistering" % (self.name,)) + REGISTRY.unregister(all_gauges.pop(self.name)) + + REGISTRY.register(self) + all_gauges[self.name] = self + + # # Detailed CPU metrics # -class CPUMetrics(object): +class CPUMetrics(object): def __init__(self): ticks_per_sec = 100 try: @@ -237,13 +297,28 @@ gc_time = Histogram( "python_gc_time", "Time taken to GC (sec)", ["gen"], - buckets=[0.0025, 0.005, 0.01, 0.025, 0.05, 0.10, 0.25, 0.50, 1.00, 2.50, - 5.00, 7.50, 15.00, 30.00, 45.00, 60.00], + buckets=[ + 0.0025, + 0.005, + 0.01, + 0.025, + 0.05, + 0.10, + 0.25, + 0.50, + 1.00, + 2.50, + 5.00, + 7.50, + 15.00, + 30.00, + 45.00, + 60.00, + ], ) class GCCounts(object): - def collect(self): cm = GaugeMetricFamily("python_gc_counts", "GC object counts", labels=["gen"]) for n, m in enumerate(gc.get_count()): @@ -279,9 +354,7 @@ sent_transactions_counter = Counter("synapse_federation_client_sent_transactions events_processed_counter = Counter("synapse_federation_client_events_processed", "") event_processing_loop_counter = Counter( - "synapse_event_processing_loop_count", - "Event processing loop iterations", - ["name"], + "synapse_event_processing_loop_count", "Event processing loop iterations", ["name"] ) event_processing_loop_room_count = Counter( @@ -311,7 +384,6 @@ last_ticked = time.time() class ReactorLastSeenMetric(object): - def collect(self): cm = GaugeMetricFamily( "python_twisted_reactor_last_seen", @@ -325,7 +397,6 @@ REGISTRY.register(ReactorLastSeenMetric()) def runUntilCurrentTimer(func): - @functools.wraps(func) def f(*args, **kwargs): now = reactor.seconds() diff --git a/synapse/push/emailpusher.py b/synapse/push/emailpusher.py
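A usage sketch for the BucketCollector added to synapse/metrics/__init__.py above (the metric name and data are hypothetical, not part of the patch). The data_collector callable returns a dict mapping observed value to item count; on every scrape the collector re-buckets that snapshot into a Prometheus histogram, and it registers itself with the global REGISTRY on construction:

    from synapse.metrics import BucketCollector

    # e.g. {queue_depth: number_of_queues_at_that_depth}
    current_depths = {}

    BucketCollector(
        "myapp_queue_depth",            # hypothetical metric name
        lambda: current_depths,         # synchronous snapshot of the data
        buckets=[1, 2, 5, 10, "+Inf"],  # "+Inf" is stripped; the rest must be sorted
    )

    # Later updates are picked up on the next scrape:
    current_depths.update({3: 2, 7: 1})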
index e8ee67401f..c89a8438a9 100644
--- a/synapse/push/emailpusher.py
+++ b/synapse/push/emailpusher.py
@@ -114,6 +114,21 @@ class EmailPusher(object):
         run_as_background_process("emailpush.process", self._process)
 
+    def _pause_processing(self):
+        """Used by tests to temporarily pause processing of events.
+
+        Asserts that it's not currently processing.
+        """
+        assert not self._is_processing
+        self._is_processing = True
+
+    def _resume_processing(self):
+        """Used by tests to resume processing of events after pausing.
+        """
+        assert self._is_processing
+        self._is_processing = False
+        self._start_processing()
+
     @defer.inlineCallbacks
     def _process(self):
         # we should never get here if we are already processing
@@ -215,6 +230,10 @@ class EmailPusher(object):
 
     @defer.inlineCallbacks
     def save_last_stream_ordering_and_success(self, last_stream_ordering):
+        if last_stream_ordering is None:
+            # This happens if we haven't yet processed anything
+            return
+
         self.last_stream_ordering = last_stream_ordering
         yield self.store.update_pusher_last_stream_ordering_and_success(
             self.app_id, self.email, self.user_id,
diff --git a/synapse/push/presentable_names.py b/synapse/push/presentable_names.py
index eef6e18c2e..0c66702325 100644 --- a/synapse/push/presentable_names.py +++ b/synapse/push/presentable_names.py
@@ -162,6 +162,17 @@ def calculate_room_name(store, room_state_ids, user_id, fallback_to_members=True def descriptor_from_member_events(member_events): + """Get a description of the room based on the member events. + + Args: + member_events (Iterable[FrozenEvent]) + + Returns: + str + """ + + member_events = list(member_events) + if len(member_events) == 0: return "nobody" elif len(member_events) == 1: diff --git a/synapse/push/pusherpool.py b/synapse/push/pusherpool.py
index 40a7709c09..63c583565f 100644 --- a/synapse/push/pusherpool.py +++ b/synapse/push/pusherpool.py
@@ -60,6 +60,11 @@ class PusherPool: def add_pusher(self, user_id, access_token, kind, app_id, app_display_name, device_display_name, pushkey, lang, data, profile_tag=""): + """Creates a new pusher and adds it to the pool + + Returns: + Deferred[EmailPusher|HttpPusher] + """ time_now_msec = self.clock.time_msec() # we try to create the pusher just to validate the config: it @@ -103,7 +108,9 @@ class PusherPool: last_stream_ordering=last_stream_ordering, profile_tag=profile_tag, ) - yield self.start_pusher_by_id(app_id, pushkey, user_id) + pusher = yield self.start_pusher_by_id(app_id, pushkey, user_id) + + defer.returnValue(pusher) @defer.inlineCallbacks def remove_pushers_by_app_id_and_pushkey_not_user(self, app_id, pushkey, @@ -184,7 +191,11 @@ class PusherPool: @defer.inlineCallbacks def start_pusher_by_id(self, app_id, pushkey, user_id): - """Look up the details for the given pusher, and start it""" + """Look up the details for the given pusher, and start it + + Returns: + Deferred[EmailPusher|HttpPusher|None]: The pusher started, if any + """ if not self._should_start_pushers: return @@ -192,13 +203,16 @@ class PusherPool: app_id, pushkey ) - p = None + pusher_dict = None for r in resultlist: if r['user_name'] == user_id: - p = r + pusher_dict = r - if p: - yield self._start_pusher(p) + pusher = None + if pusher_dict: + pusher = yield self._start_pusher(pusher_dict) + + defer.returnValue(pusher) @defer.inlineCallbacks def _start_pushers(self): @@ -224,7 +238,7 @@ class PusherPool: pusherdict (dict): Returns: - None + Deferred[EmailPusher|HttpPusher] """ try: p = self.pusher_factory.create_pusher(pusherdict) @@ -270,6 +284,8 @@ class PusherPool: p.on_started(have_notifs) + defer.returnValue(p) + @defer.inlineCallbacks def remove_pusher(self, app_id, pushkey, user_id): appid_pushkey = "%s:%s" % (app_id, pushkey) diff --git a/synapse/python_dependencies.py b/synapse/python_dependencies.py
index 7dfa78dadb..11ace2bfb1 100644 --- a/synapse/python_dependencies.py +++ b/synapse/python_dependencies.py
@@ -44,7 +44,7 @@ REQUIREMENTS = [ "canonicaljson>=1.1.3", "signedjson>=1.0.0", "pynacl>=1.2.1", - "idna>=2", + "idna>=2.5", # validating SSL certs for IP addresses requires service_identity 18.1. "service_identity>=18.1.0", @@ -65,7 +65,7 @@ REQUIREMENTS = [ "sortedcontainers>=1.4.4", "psutil>=2.0.0", "pymacaroons>=0.13.0", - "msgpack>=0.5.0", + "msgpack>=0.5.2", "phonenumbers>=8.2.0", "six>=1.10", # prometheus_client 0.4.0 changed the format of counter metrics diff --git a/synapse/replication/http/_base.py b/synapse/replication/http/_base.py
index e81456ab2b..0a432a16fa 100644 --- a/synapse/replication/http/_base.py +++ b/synapse/replication/http/_base.py
@@ -17,11 +17,17 @@ import abc import logging import re +from six import raise_from from six.moves import urllib from twisted.internet import defer -from synapse.api.errors import CodeMessageException, HttpResponseException +from synapse.api.errors import ( + CodeMessageException, + HttpResponseException, + RequestSendFailed, + SynapseError, +) from synapse.util.caches.response_cache import ResponseCache from synapse.util.stringutils import random_string @@ -175,6 +181,8 @@ class ReplicationEndpoint(object): # on the master process that we should send to the client. (And # importantly, not stack traces everywhere) raise e.to_synapse_error() + except RequestSendFailed as e: + raise_from(SynapseError(502, "Failed to talk to master"), e) defer.returnValue(result) diff --git a/synapse/rest/client/v2_alpha/account.py b/synapse/rest/client/v2_alpha/account.py
index 7025f486e1..ab75f6c2b2 100644 --- a/synapse/rest/client/v2_alpha/account.py +++ b/synapse/rest/client/v2_alpha/account.py
@@ -67,7 +67,13 @@ class EmailPasswordRequestTokenRestServlet(RestServlet): @defer.inlineCallbacks def on_POST(self, request): if self.config.email_password_reset_behaviour == "off": - raise SynapseError(400, "Password resets have been disabled on this server") + if self.config.password_resets_were_disabled_due_to_email_config: + logger.warn( + "User password resets have been disabled due to lack of email config" + ) + raise SynapseError( + 400, "Email-based password resets have been disabled on this server", + ) body = parse_json_object_from_request(request) @@ -195,9 +201,6 @@ class MsisdnPasswordRequestTokenRestServlet(RestServlet): @defer.inlineCallbacks def on_POST(self, request): - if not self.config.email_password_reset_behaviour == "off": - raise SynapseError(400, "Password resets have been disabled on this server") - body = parse_json_object_from_request(request) assert_params_in_dict(body, [ @@ -252,6 +255,14 @@ class PasswordResetSubmitTokenServlet(RestServlet): 400, "This medium is currently not supported for password resets", ) + if self.config.email_password_reset_behaviour == "off": + if self.config.password_resets_were_disabled_due_to_email_config: + logger.warn( + "User password resets have been disabled due to lack of email config" + ) + raise SynapseError( + 400, "Email-based password resets have been disabled on this server", + ) sid = parse_string(request, "sid") client_secret = parse_string(request, "client_secret") diff --git a/synapse/rest/client/v2_alpha/account_validity.py b/synapse/rest/client/v2_alpha/account_validity.py
index 55c4ed5660..63bdc33564 100644 --- a/synapse/rest/client/v2_alpha/account_validity.py +++ b/synapse/rest/client/v2_alpha/account_validity.py
@@ -79,7 +79,7 @@ class AccountValiditySendMailServlet(RestServlet): if not self.account_validity.renew_by_email_enabled: raise AuthError(403, "Account renewal via email is disabled on this server.") - requester = yield self.auth.get_user_by_req(request) + requester = yield self.auth.get_user_by_req(request, allow_expired=True) user_id = requester.user.to_string() yield self.account_activity_handler.send_renewal_email_to_user(user_id) diff --git a/synapse/rest/media/v1/media_repository.py b/synapse/rest/media/v1/media_repository.py
index 8569677355..a4929dd5db 100644 --- a/synapse/rest/media/v1/media_repository.py +++ b/synapse/rest/media/v1/media_repository.py
@@ -386,8 +386,10 @@ class MediaRepository(object): raise SynapseError(502, "Failed to fetch remote media") except SynapseError: - logger.exception("Failed to fetch remote media %s/%s", - server_name, media_id) + logger.warn( + "Failed to fetch remote media %s/%s", + server_name, media_id, + ) raise except NotRetryingDestination: logger.warn("Not retrying destination %r", server_name) diff --git a/synapse/server.py b/synapse/server.py
index 9229a68a8d..a54e023cc9 100644 --- a/synapse/server.py +++ b/synapse/server.py
@@ -1,5 +1,7 @@ # -*- coding: utf-8 -*- # Copyright 2014-2016 OpenMarket Ltd +# Copyright 2017-2018 New Vector Ltd +# Copyright 2019 The Matrix.org Foundation C.I.C. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -35,6 +37,7 @@ from synapse.crypto import context_factory from synapse.crypto.keyring import Keyring from synapse.events.builder import EventBuilderFactory from synapse.events.spamcheck import SpamChecker +from synapse.events.third_party_rules import ThirdPartyEventRules from synapse.events.utils import EventClientSerializer from synapse.federation.federation_client import FederationClient from synapse.federation.federation_server import ( @@ -178,6 +181,7 @@ class HomeServer(object): 'groups_attestation_renewer', 'secrets', 'spam_checker', + 'third_party_event_rules', 'room_member_handler', 'federation_registry', 'server_notices_manager', @@ -483,6 +487,9 @@ class HomeServer(object): def build_spam_checker(self): return SpamChecker(self) + def build_third_party_event_rules(self): + return ThirdPartyEventRules(self) + def build_room_member_handler(self): if self.config.worker_app: return RoomMemberWorkerHandler(self) diff --git a/synapse/storage/__init__.py b/synapse/storage/__init__.py
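A note on the server.py wiring above: each name in HomeServer.DEPENDENCIES gets a memoised get_<name>() accessor generated for it, so listing 'third_party_event_rules' and adding the build_third_party_event_rules method is the whole hookup. A sketch of the resulting call site, assuming the ThirdPartyEventRules wrapper exposes the same check_event_allowed interface the test module later in this commit implements:

    from twisted.internet import defer
    from synapse.api.errors import SynapseError

    @defer.inlineCallbacks
    def _check(hs, event, context):
        # Generated accessor: builds ThirdPartyEventRules once, then caches it.
        rules = hs.get_third_party_event_rules()

        # Assumed interface, mirroring the test module's check_event_allowed.
        allowed = yield rules.check_event_allowed(event, context)
        if not allowed:
            raise SynapseError(403, "This event is not allowed in this context")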
index 71316f7d09..0ca6f6121f 100644 --- a/synapse/storage/__init__.py +++ b/synapse/storage/__init__.py
@@ -279,23 +279,37 @@ class DataStore( """ Counts the number of users who used this homeserver in the last 24 hours. """ + yesterday = int(self._clock.time_msec()) - (1000 * 60 * 60 * 24) + return self.runInteraction("count_daily_users", self._count_users, yesterday,) - def _count_users(txn): - yesterday = int(self._clock.time_msec()) - (1000 * 60 * 60 * 24) - - sql = """ - SELECT COALESCE(count(*), 0) FROM ( - SELECT user_id FROM user_ips - WHERE last_seen > ? - GROUP BY user_id - ) u - """ - - txn.execute(sql, (yesterday,)) - count, = txn.fetchone() - return count + def count_monthly_users(self): + """ + Counts the number of users who used this homeserver in the last 30 days. + Note this method is intended for phonehome metrics only and is different + from the mau figure in synapse.storage.monthly_active_users which, + amongst other things, includes a 3 day grace period before a user counts. + """ + thirty_days_ago = int(self._clock.time_msec()) - (1000 * 60 * 60 * 24 * 30) + return self.runInteraction( + "count_monthly_users", + self._count_users, + thirty_days_ago, + ) - return self.runInteraction("count_users", _count_users) + def _count_users(self, txn, time_from): + """ + Returns number of users seen in the past time_from period + """ + sql = """ + SELECT COALESCE(count(*), 0) FROM ( + SELECT user_id FROM user_ips + WHERE last_seen > ? + GROUP BY user_id + ) u + """ + txn.execute(sql, (time_from,)) + count, = txn.fetchone() + return count def count_r30_users(self): """ diff --git a/synapse/storage/_base.py b/synapse/storage/_base.py
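The refactor above folds both counters into one shared _count_users helper; only the cutoff timestamp differs (and, per the docstring, count_monthly_users is a phonehome metric, distinct from the MAU figure with its 3-day grace period). A standalone illustration of the cutoff arithmetic, with a hypothetical clock value:

    # All values are milliseconds since the epoch.
    now = 1560000000000                               # example self._clock.time_msec()
    yesterday = now - 1000 * 60 * 60 * 24             # 24 hours back
    thirty_days_ago = now - 1000 * 60 * 60 * 24 * 30  # 30 days back

    # Both methods then run the same query via the shared helper:
    #   self.runInteraction("count_daily_users", self._count_users, yesterday)
    #   self.runInteraction("count_monthly_users", self._count_users, thirty_days_ago)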
index ae891aa332..941c07fce5 100644 --- a/synapse/storage/_base.py +++ b/synapse/storage/_base.py
@@ -299,12 +299,12 @@ class SQLBaseStore(object): def select_users_with_no_expiration_date_txn(txn): """Retrieves the list of registered users with no expiration date from the - database. + database, filtering out deactivated users. """ sql = ( "SELECT users.name FROM users" " LEFT JOIN account_validity ON (users.name = account_validity.user_id)" - " WHERE account_validity.user_id is NULL;" + " WHERE account_validity.user_id is NULL AND users.deactivated = 0;" ) txn.execute(sql, []) diff --git a/synapse/storage/events.py b/synapse/storage/events.py
index f9162be9b9..f631fb1733 100644 --- a/synapse/storage/events.py +++ b/synapse/storage/events.py
@@ -17,7 +17,7 @@ import itertools import logging -from collections import OrderedDict, deque, namedtuple +from collections import Counter as c_counter, OrderedDict, deque, namedtuple from functools import wraps from six import iteritems, text_type @@ -33,6 +33,7 @@ from synapse.api.constants import EventTypes from synapse.api.errors import SynapseError from synapse.events import EventBase # noqa: F401 from synapse.events.snapshot import EventContext # noqa: F401 +from synapse.metrics import BucketCollector from synapse.metrics.background_process_metrics import run_as_background_process from synapse.state import StateResolutionStore from synapse.storage.background_updates import BackgroundUpdateStore @@ -220,13 +221,39 @@ class EventsStore( EventsWorkerStore, BackgroundUpdateStore, ): - def __init__(self, db_conn, hs): super(EventsStore, self).__init__(db_conn, hs) self._event_persist_queue = _EventPeristenceQueue() self._state_resolution_handler = hs.get_state_resolution_handler() + # Collect metrics on the number of forward extremities that exist. + # Counter of number of extremities to count + self._current_forward_extremities_amount = c_counter() + + BucketCollector( + "synapse_forward_extremities", + lambda: self._current_forward_extremities_amount, + buckets=[1, 2, 3, 5, 7, 10, 15, 20, 50, 100, 200, 500, "+Inf"] + ) + + # Read the extrems every 60 minutes + hs.get_clock().looping_call(self._read_forward_extremities, 60 * 60 * 1000) + + @defer.inlineCallbacks + def _read_forward_extremities(self): + def fetch(txn): + txn.execute( + """ + select count(*) c from event_forward_extremities + group by room_id + """ + ) + return txn.fetchall() + + res = yield self.runInteraction("read_forward_extremities", fetch) + self._current_forward_extremities_amount = c_counter(list(x[0] for x in res)) + @defer.inlineCallbacks def persist_events(self, events_and_contexts, backfilled=False): """ @@ -568,17 +595,11 @@ class EventsStore( ) txn.execute(sql, batch) - results.extend( - r[0] - for r in txn - if not json.loads(r[1]).get("soft_failed") - ) + results.extend(r[0] for r in txn if not json.loads(r[1]).get("soft_failed")) for chunk in batch_iter(event_ids, 100): yield self.runInteraction( - "_get_events_which_are_prevs", - _get_events_which_are_prevs_txn, - chunk, + "_get_events_which_are_prevs", _get_events_which_are_prevs_txn, chunk ) defer.returnValue(results) @@ -640,9 +661,7 @@ class EventsStore( for chunk in batch_iter(event_ids, 100): yield self.runInteraction( - "_get_prevs_before_rejected", - _get_prevs_before_rejected_txn, - chunk, + "_get_prevs_before_rejected", _get_prevs_before_rejected_txn, chunk ) defer.returnValue(existing_prevs) diff --git a/synapse/storage/registration.py b/synapse/storage/registration.py
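To make the data shape concrete: the query above yields one row per room containing that room's forward-extremity count, and wrapping those rows in a Counter produces exactly the mapping BucketCollector's data_collector is expected to return, observed value to number of items. An illustrative snippet with made-up rows:

    from collections import Counter

    rows = [(2,), (2,), (6,)]             # hypothetical per-room counts from the SQL
    counts = Counter(x[0] for x in rows)  # Counter({2: 2, 6: 1})
    # i.e. two rooms currently have 2 forward extremities, one room has 6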
index 1dd1182e82..d36917e4d6 100644
--- a/synapse/storage/registration.py
+++ b/synapse/storage/registration.py
@@ -15,6 +15,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+import logging
 import re
 
 from six import iterkeys
@@ -31,6 +32,8 @@ from synapse.util.caches.descriptors import cached, cachedInlineCallbacks
 
 THIRTY_MINUTES_IN_MS = 30 * 60 * 1000
 
+logger = logging.getLogger(__name__)
+
 
 class RegistrationWorkerStore(SQLBaseStore):
     def __init__(self, db_conn, hs):
@@ -249,6 +252,20 @@ class RegistrationWorkerStore(SQLBaseStore):
         )
 
     @defer.inlineCallbacks
+    def delete_account_validity_for_user(self, user_id):
+        """Deletes the entry for the given user in the account validity table, removing
+        their expiration date and renewal token.
+
+        Args:
+            user_id (str): ID of the user to remove from the account validity table.
+        """
+        yield self._simple_delete_one(
+            table="account_validity",
+            keyvalues={"user_id": user_id},
+            desc="delete_account_validity_for_user",
+        )
+
+    @defer.inlineCallbacks
     def is_server_admin(self, user):
         res = yield self._simple_select_one_onecol(
             table="users",
@@ -598,12 +615,78 @@ class RegistrationStore(
             "user_threepids_grandfather", self._bg_user_threepids_grandfather,
         )
 
+        self.register_background_update_handler(
+            "users_set_deactivated_flag", self._background_update_set_deactivated_flag,
+        )
+
         # Create a background job for culling expired 3PID validity tokens
         hs.get_clock().looping_call(
             self.cull_expired_threepid_validation_tokens, THIRTY_MINUTES_IN_MS,
         )
 
     @defer.inlineCallbacks
+    def _background_update_set_deactivated_flag(self, progress, batch_size):
+        """Scans for users who appear deactivated (no password hash, no access
+        tokens or threepids, not a guest or appservice user) and sets the
+        'deactivated' flag to 1 for each of them.
+        """
+
+        last_user = progress.get("user_id", "")
+
+        def _background_update_set_deactivated_flag_txn(txn):
+            txn.execute(
+                """
+                SELECT
+                    users.name,
+                    COUNT(access_tokens.token) AS count_tokens,
+                    COUNT(user_threepids.address) AS count_threepids
+                FROM users
+                    LEFT JOIN access_tokens ON (access_tokens.user_id = users.name)
+                    LEFT JOIN user_threepids ON (user_threepids.user_id = users.name)
+                WHERE (users.password_hash IS NULL OR users.password_hash = '')
+                AND (users.appservice_id IS NULL OR users.appservice_id = '')
+                AND users.is_guest = 0
+                AND users.name > ?
+                GROUP BY users.name
+                ORDER BY users.name ASC
+                LIMIT ?;
+                """,
+                (last_user, batch_size),
+            )
+
+            rows = self.cursor_to_dict(txn)
+
+            if not rows:
+                return True
+
+            rows_processed_nb = 0
+
+            for user in rows:
+                if not user["count_tokens"] and not user["count_threepids"]:
+                    self.set_user_deactivated_status_txn(txn, user["name"], True)
+                    rows_processed_nb += 1
+
+            logger.info("Marked %d rows as deactivated", rows_processed_nb)
+
+            self._background_update_progress_txn(
+                txn, "users_set_deactivated_flag", {"user_id": rows[-1]["name"]}
+            )
+
+            if batch_size > len(rows):
+                return True
+            else:
+                return False
+
+        end = yield self.runInteraction(
+            "users_set_deactivated_flag",
+            _background_update_set_deactivated_flag_txn,
+        )
+
+        if end:
+            yield self._end_background_update("users_set_deactivated_flag")
+
+        defer.returnValue(batch_size)
+
+    @defer.inlineCallbacks
     def add_access_token_to_user(self, user_id, token, device_id=None):
         """Adds an access token for the given user.
@@ -1268,3 +1351,50 @@ class RegistrationStore( "delete_threepid_session", delete_threepid_session_txn, ) + + def set_user_deactivated_status_txn(self, txn, user_id, deactivated): + self._simple_update_one_txn( + txn=txn, + table="users", + keyvalues={"name": user_id}, + updatevalues={"deactivated": 1 if deactivated else 0}, + ) + self._invalidate_cache_and_stream( + txn, self.get_user_deactivated_status, (user_id,), + ) + + @defer.inlineCallbacks + def set_user_deactivated_status(self, user_id, deactivated): + """Set the `deactivated` property for the provided user to the provided value. + + Args: + user_id (str): The ID of the user to set the status for. + deactivated (bool): The value to set for `deactivated`. + """ + + yield self.runInteraction( + "set_user_deactivated_status", + self.set_user_deactivated_status_txn, + user_id, deactivated, + ) + + @cachedInlineCallbacks() + def get_user_deactivated_status(self, user_id): + """Retrieve the value for the `deactivated` property for the provided user. + + Args: + user_id (str): The ID of the user to retrieve the status for. + + Returns: + defer.Deferred(bool): The requested value. + """ + + res = yield self._simple_select_one_onecol( + table="users", + keyvalues={"name": user_id}, + retcol="deactivated", + desc="get_user_deactivated_status", + ) + + # Convert the integer into a boolean. + defer.returnValue(res == 1) diff --git a/synapse/storage/schema/delta/55/users_alter_deactivated.sql b/synapse/storage/schema/delta/55/users_alter_deactivated.sql new file mode 100644
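The pairing above follows the standard Synapse cache pattern: get_user_deactivated_status is memoised per user by @cachedInlineCallbacks, so any writer must invalidate that cache (and stream the invalidation to workers), which set_user_deactivated_status_txn does via _invalidate_cache_and_stream. An illustrative round trip against a hypothetical store instance:

    from twisted.internet import defer

    @defer.inlineCallbacks
    def deactivate_and_check(store, user_id):
        # Write path: updates users.deactivated and invalidates the cached getter.
        yield store.set_user_deactivated_status(user_id, True)

        # Read path: the cache is repopulated from the users.deactivated column,
        # and the stored integer is converted back to a bool.
        deactivated = yield store.get_user_deactivated_status(user_id)
        assert deactivated is True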
index 0000000000..dabdde489b --- /dev/null +++ b/synapse/storage/schema/delta/55/users_alter_deactivated.sql
@@ -0,0 +1,19 @@ +/* Copyright 2019 The Matrix.org Foundation C.I.C. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +ALTER TABLE users ADD deactivated SMALLINT DEFAULT 0 NOT NULL; + +INSERT INTO background_updates (update_name, progress_json) VALUES + ('users_set_deactivated_flag', '{}'); diff --git a/synctl b/synctl
index 07a68e6d85..30d751236f 100755
--- a/synctl
+++ b/synctl
@@ -69,10 +69,14 @@ def abort(message, colour=RED, stream=sys.stderr):
     sys.exit(1)
 
 
-def start(configfile):
+def start(configfile, daemonize=True):
     write("Starting ...")
     args = SYNAPSE
-    args.extend(["--daemonize", "-c", configfile])
+
+    if daemonize:
+        args.extend(["--daemonize", "-c", configfile])
+    else:
+        args.extend(["-c", configfile])
 
     try:
         subprocess.check_call(args)
@@ -143,12 +147,21 @@ def main():
         help="start or stop all the workers in the given directory"
         " and the main synapse process",
     )
+    parser.add_argument(
+        "--no-daemonize",
+        action="store_false",
+        help="Run synapse in the foreground for debugging. "
+        "Will work only if the daemonize option is not set in the config."
+    )
 
     options = parser.parse_args()
 
     if options.worker and options.all_processes:
         write('Cannot use "--worker" with "--all-processes"', stream=sys.stderr)
         sys.exit(1)
+    if not options.no_daemonize and options.all_processes:
+        write('Cannot use "--no-daemonize" with "--all-processes"', stream=sys.stderr)
+        sys.exit(1)
 
     configfile = options.configfile
 
@@ -276,7 +289,7 @@ def main():
     # Check if synapse is already running
     if os.path.exists(pidfile) and pid_running(int(open(pidfile).read())):
         abort("synapse.app.homeserver already running")
-    start(configfile)
+    start(configfile, bool(options.no_daemonize))
 
     for worker in workers:
         env = os.environ.copy()
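With this flag, `synctl start --no-daemonize` keeps the homeserver attached to the terminal for debugging. Note the store_false semantics: options.no_daemonize defaults to True and becomes False when the flag is passed, which is why start() receives bool(options.no_daemonize) as its daemonize argument and why the incompatibility check above tests `not options.no_daemonize`. As the help text notes, foreground mode only works when the config file itself does not enable daemonization, and it cannot be combined with `--all-processes`.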
diff --git a/tests/push/test_email.py b/tests/push/test_email.py
index 9bc5f07de1..72760a0733 100644
--- a/tests/push/test_email.py
+++ b/tests/push/test_email.py
@@ -15,6 +15,7 @@ import os +import attr import pkg_resources from twisted.internet.defer import Deferred @@ -25,6 +26,13 @@ from synapse.rest.client.v1 import login, room from tests.unittest import HomeserverTestCase +@attr.s +class _User(object): + "Helper wrapper for user ID and access token" + id = attr.ib() + token = attr.ib() + + class EmailPusherTests(HomeserverTestCase): servlets = [ @@ -71,25 +79,32 @@ class EmailPusherTests(HomeserverTestCase): return hs - def test_sends_email(self): - + def prepare(self, reactor, clock, hs): # Register the user who gets notified - user_id = self.register_user("user", "pass") - access_token = self.login("user", "pass") - - # Register the user who sends the message - other_user_id = self.register_user("otheruser", "pass") - other_access_token = self.login("otheruser", "pass") + self.user_id = self.register_user("user", "pass") + self.access_token = self.login("user", "pass") + + # Register other users + self.others = [ + _User( + id=self.register_user("otheruser1", "pass"), + token=self.login("otheruser1", "pass"), + ), + _User( + id=self.register_user("otheruser2", "pass"), + token=self.login("otheruser2", "pass"), + ), + ] # Register the pusher user_tuple = self.get_success( - self.hs.get_datastore().get_user_by_access_token(access_token) + self.hs.get_datastore().get_user_by_access_token(self.access_token) ) token_id = user_tuple["token_id"] - self.get_success( + self.pusher = self.get_success( self.hs.get_pusherpool().add_pusher( - user_id=user_id, + user_id=self.user_id, access_token=token_id, kind="email", app_id="m.email", @@ -101,22 +116,54 @@ class EmailPusherTests(HomeserverTestCase): ) ) - # Create a room - room = self.helper.create_room_as(user_id, tok=access_token) + def test_simple_sends_email(self): + # Create a simple room with two users + room = self.helper.create_room_as(self.user_id, tok=self.access_token) + self.helper.invite( + room=room, src=self.user_id, tok=self.access_token, targ=self.others[0].id, + ) + self.helper.join(room=room, user=self.others[0].id, tok=self.others[0].token) - # Invite the other person - self.helper.invite(room=room, src=user_id, tok=access_token, targ=other_user_id) + # The other user sends some messages + self.helper.send(room, body="Hi!", tok=self.others[0].token) + self.helper.send(room, body="There!", tok=self.others[0].token) - # The other user joins - self.helper.join(room=room, user=other_user_id, tok=other_access_token) + # We should get emailed about that message + self._check_for_mail() - # The other user sends some messages - self.helper.send(room, body="Hi!", tok=other_access_token) - self.helper.send(room, body="There!", tok=other_access_token) + def test_multiple_members_email(self): + # We want to test multiple notifications, so we pause processing of push + # while we send messages. + self.pusher._pause_processing() + + # Create a simple room with multiple other users + room = self.helper.create_room_as(self.user_id, tok=self.access_token) + + for other in self.others: + self.helper.invite( + room=room, src=self.user_id, tok=self.access_token, targ=other.id, + ) + self.helper.join(room=room, user=other.id, tok=other.token) + + # The other users send some messages + self.helper.send(room, body="Hi!", tok=self.others[0].token) + self.helper.send(room, body="There!", tok=self.others[1].token) + self.helper.send(room, body="There!", tok=self.others[1].token) + + # Nothing should have happened yet, as we're paused. 
+ assert not self.email_attempts + + self.pusher._resume_processing() + + # We should get emailed about those messages + self._check_for_mail() + + def _check_for_mail(self): + "Check that the user receives an email notification" # Get the stream ordering before it gets sent pushers = self.get_success( - self.hs.get_datastore().get_pushers_by(dict(user_name=user_id)) + self.hs.get_datastore().get_pushers_by(dict(user_name=self.user_id)) ) self.assertEqual(len(pushers), 1) last_stream_ordering = pushers[0]["last_stream_ordering"] @@ -126,7 +173,7 @@ class EmailPusherTests(HomeserverTestCase): # It hasn't succeeded yet, so the stream ordering shouldn't have moved pushers = self.get_success( - self.hs.get_datastore().get_pushers_by(dict(user_name=user_id)) + self.hs.get_datastore().get_pushers_by(dict(user_name=self.user_id)) ) self.assertEqual(len(pushers), 1) self.assertEqual(last_stream_ordering, pushers[0]["last_stream_ordering"]) @@ -143,7 +190,7 @@ class EmailPusherTests(HomeserverTestCase): # The stream ordering has increased pushers = self.get_success( - self.hs.get_datastore().get_pushers_by(dict(user_name=user_id)) + self.hs.get_datastore().get_pushers_by(dict(user_name=self.user_id)) ) self.assertEqual(len(pushers), 1) self.assertTrue(pushers[0]["last_stream_ordering"] > last_stream_ordering) diff --git a/tests/rest/client/third_party_rules.py b/tests/rest/client/third_party_rules.py new file mode 100644
index 0000000000..7167fc56b6 --- /dev/null +++ b/tests/rest/client/third_party_rules.py
@@ -0,0 +1,79 @@ +# -*- coding: utf-8 -*- +# Copyright 2019 The Matrix.org Foundation C.I.C. +# +# Licensed under the Apache License, Version 2.0 (the 'License'); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an 'AS IS' BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from synapse.rest import admin +from synapse.rest.client.v1 import login, room + +from tests import unittest + + +class ThirdPartyRulesTestModule(object): + def __init__(self, config): + pass + + def check_event_allowed(self, event, context): + if event.type == "foo.bar.forbidden": + return False + else: + return True + + @staticmethod + def parse_config(config): + return config + + +class ThirdPartyRulesTestCase(unittest.HomeserverTestCase): + servlets = [ + admin.register_servlets, + login.register_servlets, + room.register_servlets, + ] + + def make_homeserver(self, reactor, clock): + config = self.default_config() + config["third_party_event_rules"] = { + "module": "tests.rest.client.third_party_rules.ThirdPartyRulesTestModule", + "config": {}, + } + + self.hs = self.setup_test_homeserver(config=config) + return self.hs + + def test_third_party_rules(self): + """Tests that a forbidden event is forbidden from being sent, but an allowed one + can be sent. + """ + user_id = self.register_user("kermit", "monkey") + tok = self.login("kermit", "monkey") + + room_id = self.helper.create_room_as(user_id, tok=tok) + + request, channel = self.make_request( + "PUT", + "/_matrix/client/r0/rooms/%s/send/foo.bar.allowed/1" % room_id, + {}, + access_token=tok, + ) + self.render(request) + self.assertEquals(channel.result["code"], b"200", channel.result) + + request, channel = self.make_request( + "PUT", + "/_matrix/client/r0/rooms/%s/send/foo.bar.forbidden/1" % room_id, + {}, + access_token=tok, + ) + self.render(request) + self.assertEquals(channel.result["code"], b"403", channel.result) diff --git a/tests/rest/client/v2_alpha/test_account.py b/tests/rest/client/v2_alpha/test_account.py
index 0d1c0868ce..a60a4a3b87 100644 --- a/tests/rest/client/v2_alpha/test_account.py +++ b/tests/rest/client/v2_alpha/test_account.py
@@ -15,6 +15,7 @@ # See the License for the specific language governing permissions and # limitations under the License. +import json import os import re from email.parser import Parser @@ -239,3 +240,47 @@ class PasswordResetTestCase(unittest.HomeserverTestCase): ) self.render(request) self.assertEquals(expected_code, channel.code, channel.result) + + +class DeactivateTestCase(unittest.HomeserverTestCase): + + servlets = [ + synapse.rest.admin.register_servlets_for_client_rest_resource, + login.register_servlets, + account.register_servlets, + ] + + def make_homeserver(self, reactor, clock): + hs = self.setup_test_homeserver() + return hs + + def test_deactivate_account(self): + user_id = self.register_user("kermit", "test") + tok = self.login("kermit", "test") + + request_data = json.dumps({ + "auth": { + "type": "m.login.password", + "user": user_id, + "password": "test", + }, + "erase": False, + }) + request, channel = self.make_request( + "POST", + "account/deactivate", + request_data, + access_token=tok, + ) + self.render(request) + self.assertEqual(request.code, 200) + + store = self.hs.get_datastore() + + # Check that the user has been marked as deactivated. + self.assertTrue(self.get_success(store.get_user_deactivated_status(user_id))) + + # Check that this access token has been invalidated. + request, channel = self.make_request("GET", "account/whoami") + self.render(request) + self.assertEqual(request.code, 401) diff --git a/tests/rest/client/v2_alpha/test_register.py b/tests/rest/client/v2_alpha/test_register.py
index e9d8f3c734..b35b215446 100644 --- a/tests/rest/client/v2_alpha/test_register.py +++ b/tests/rest/client/v2_alpha/test_register.py
@@ -26,7 +26,7 @@ from synapse.api.constants import LoginType from synapse.api.errors import Codes from synapse.appservice import ApplicationService from synapse.rest.client.v1 import login -from synapse.rest.client.v2_alpha import account_validity, register, sync +from synapse.rest.client.v2_alpha import account, account_validity, register, sync from tests import unittest @@ -308,6 +308,7 @@ class AccountValidityRenewalByEmailTestCase(unittest.HomeserverTestCase): login.register_servlets, sync.register_servlets, account_validity.register_servlets, + account.register_servlets, ] def make_homeserver(self, reactor, clock): @@ -358,20 +359,7 @@ class AccountValidityRenewalByEmailTestCase(unittest.HomeserverTestCase): def test_renewal_email(self): self.email_attempts = [] - user_id = self.register_user("kermit", "monkey") - tok = self.login("kermit", "monkey") - # We need to manually add an email address otherwise the handler will do - # nothing. - now = self.hs.clock.time_msec() - self.get_success( - self.store.user_add_threepid( - user_id=user_id, - medium="email", - address="kermit@example.com", - validated_at=now, - added_at=now, - ) - ) + (user_id, tok) = self.create_user() # Move 6 days forward. This should trigger a renewal email to be sent. self.reactor.advance(datetime.timedelta(days=6).total_seconds()) @@ -396,6 +384,44 @@ class AccountValidityRenewalByEmailTestCase(unittest.HomeserverTestCase): def test_manual_email_send(self): self.email_attempts = [] + (user_id, tok) = self.create_user() + request, channel = self.make_request( + b"POST", + "/_matrix/client/unstable/account_validity/send_mail", + access_token=tok, + ) + self.render(request) + self.assertEquals(channel.result["code"], b"200", channel.result) + + self.assertEqual(len(self.email_attempts), 1) + + def test_deactivated_user(self): + self.email_attempts = [] + + (user_id, tok) = self.create_user() + + request_data = json.dumps({ + "auth": { + "type": "m.login.password", + "user": user_id, + "password": "monkey", + }, + "erase": False, + }) + request, channel = self.make_request( + "POST", + "account/deactivate", + request_data, + access_token=tok, + ) + self.render(request) + self.assertEqual(request.code, 200) + + self.reactor.advance(datetime.timedelta(days=8).total_seconds()) + + self.assertEqual(len(self.email_attempts), 0) + + def create_user(self): user_id = self.register_user("kermit", "monkey") tok = self.login("kermit", "monkey") # We need to manually add an email address otherwise the handler will do @@ -410,7 +436,33 @@ class AccountValidityRenewalByEmailTestCase(unittest.HomeserverTestCase): added_at=now, ) ) + return (user_id, tok) + + def test_manual_email_send_expired_account(self): + user_id = self.register_user("kermit", "monkey") + tok = self.login("kermit", "monkey") + + # We need to manually add an email address otherwise the handler will do + # nothing. + now = self.hs.clock.time_msec() + self.get_success( + self.store.user_add_threepid( + user_id=user_id, + medium="email", + address="kermit@example.com", + validated_at=now, + added_at=now, + ) + ) + + # Make the account expire. + self.reactor.advance(datetime.timedelta(days=8).total_seconds()) + + # Ignore all emails sent by the automatic background task and only focus on the + # ones sent manually. + self.email_attempts = [] + # Test that we're still able to manually trigger a mail to be sent. 
request, channel = self.make_request( b"POST", "/_matrix/client/unstable/account_validity/send_mail", diff --git a/tests/storage/test_cleanup_extrems.py b/tests/storage/test_cleanup_extrems.py
index 6aa8b8b3c6..f4c81ef77d 100644 --- a/tests/storage/test_cleanup_extrems.py +++ b/tests/storage/test_cleanup_extrems.py
@@ -15,7 +15,6 @@ import os.path -from synapse.api.constants import EventTypes from synapse.storage import prepare_database from synapse.types import Requester, UserID @@ -23,17 +22,12 @@ from tests.unittest import HomeserverTestCase class CleanupExtremBackgroundUpdateStoreTestCase(HomeserverTestCase): - """Test the background update to clean forward extremities table. """ - def make_homeserver(self, reactor, clock): - # Hack until we understand why test_forked_graph_cleanup fails with v4 - config = self.default_config() - config['default_room_version'] = '1' - return self.setup_test_homeserver(config=config) + Test the background update to clean forward extremities table. + """ def prepare(self, reactor, clock, homeserver): self.store = homeserver.get_datastore() - self.event_creator = homeserver.get_event_creation_handler() self.room_creator = homeserver.get_room_creation_handler() # Create a test user and room @@ -42,56 +36,6 @@ class CleanupExtremBackgroundUpdateStoreTestCase(HomeserverTestCase): info = self.get_success(self.room_creator.create_room(self.requester, {})) self.room_id = info["room_id"] - def create_and_send_event(self, soft_failed=False, prev_event_ids=None): - """Create and send an event. - - Args: - soft_failed (bool): Whether to create a soft failed event or not - prev_event_ids (list[str]|None): Explicitly set the prev events, - or if None just use the default - - Returns: - str: The new event's ID. - """ - prev_events_and_hashes = None - if prev_event_ids: - prev_events_and_hashes = [[p, {}, 0] for p in prev_event_ids] - - event, context = self.get_success( - self.event_creator.create_event( - self.requester, - { - "type": EventTypes.Message, - "room_id": self.room_id, - "sender": self.user.to_string(), - "content": {"body": "", "msgtype": "m.text"}, - }, - prev_events_and_hashes=prev_events_and_hashes, - ) - ) - - if soft_failed: - event.internal_metadata.soft_failed = True - - self.get_success( - self.event_creator.send_nonmember_event(self.requester, event, context) - ) - - return event.event_id - - def add_extremity(self, event_id): - """Add the given event as an extremity to the room. - """ - self.get_success( - self.store._simple_insert( - table="event_forward_extremities", - values={"room_id": self.room_id, "event_id": event_id}, - desc="test_add_extremity", - ) - ) - - self.store.get_latest_event_ids_in_room.invalidate((self.room_id,)) - def run_background_update(self): """Re run the background update to clean up the extremities. 
""" @@ -131,10 +75,16 @@ class CleanupExtremBackgroundUpdateStoreTestCase(HomeserverTestCase): """ # Create the room graph - event_id_1 = self.create_and_send_event() - event_id_2 = self.create_and_send_event(True, [event_id_1]) - event_id_3 = self.create_and_send_event(True, [event_id_2]) - event_id_4 = self.create_and_send_event(False, [event_id_3]) + event_id_1 = self.create_and_send_event(self.room_id, self.user) + event_id_2 = self.create_and_send_event( + self.room_id, self.user, True, [event_id_1] + ) + event_id_3 = self.create_and_send_event( + self.room_id, self.user, True, [event_id_2] + ) + event_id_4 = self.create_and_send_event( + self.room_id, self.user, False, [event_id_3] + ) # Check the latest events are as expected latest_event_ids = self.get_success( @@ -154,12 +104,16 @@ class CleanupExtremBackgroundUpdateStoreTestCase(HomeserverTestCase): Where SF* are soft failed, and with extremities of A and B """ # Create the room graph - event_id_a = self.create_and_send_event() - event_id_sf1 = self.create_and_send_event(True, [event_id_a]) - event_id_b = self.create_and_send_event(False, [event_id_sf1]) + event_id_a = self.create_and_send_event(self.room_id, self.user) + event_id_sf1 = self.create_and_send_event( + self.room_id, self.user, True, [event_id_a] + ) + event_id_b = self.create_and_send_event( + self.room_id, self.user, False, [event_id_sf1] + ) # Add the new extremity and check the latest events are as expected - self.add_extremity(event_id_a) + self.add_extremity(self.room_id, event_id_a) latest_event_ids = self.get_success( self.store.get_latest_event_ids_in_room(self.room_id) @@ -185,13 +139,19 @@ class CleanupExtremBackgroundUpdateStoreTestCase(HomeserverTestCase): Where SF* are soft failed, and with extremities of A and B """ # Create the room graph - event_id_a = self.create_and_send_event() - event_id_sf1 = self.create_and_send_event(True, [event_id_a]) - event_id_sf2 = self.create_and_send_event(True, [event_id_sf1]) - event_id_b = self.create_and_send_event(False, [event_id_sf2]) + event_id_a = self.create_and_send_event(self.room_id, self.user) + event_id_sf1 = self.create_and_send_event( + self.room_id, self.user, True, [event_id_a] + ) + event_id_sf2 = self.create_and_send_event( + self.room_id, self.user, True, [event_id_sf1] + ) + event_id_b = self.create_and_send_event( + self.room_id, self.user, False, [event_id_sf2] + ) # Add the new extremity and check the latest events are as expected - self.add_extremity(event_id_a) + self.add_extremity(self.room_id, event_id_a) latest_event_ids = self.get_success( self.store.get_latest_event_ids_in_room(self.room_id) @@ -227,16 +187,26 @@ class CleanupExtremBackgroundUpdateStoreTestCase(HomeserverTestCase): """ # Create the room graph - event_id_a = self.create_and_send_event() - event_id_b = self.create_and_send_event() - event_id_sf1 = self.create_and_send_event(True, [event_id_a]) - event_id_sf2 = self.create_and_send_event(True, [event_id_a, event_id_b]) - event_id_sf3 = self.create_and_send_event(True, [event_id_sf1]) - self.create_and_send_event(True, [event_id_sf2, event_id_sf3]) # SF4 - event_id_c = self.create_and_send_event(False, [event_id_sf3]) + event_id_a = self.create_and_send_event(self.room_id, self.user) + event_id_b = self.create_and_send_event(self.room_id, self.user) + event_id_sf1 = self.create_and_send_event( + self.room_id, self.user, True, [event_id_a] + ) + event_id_sf2 = self.create_and_send_event( + self.room_id, self.user, True, [event_id_a, event_id_b] + ) + event_id_sf3 = 
self.create_and_send_event( + self.room_id, self.user, True, [event_id_sf1] + ) + self.create_and_send_event( + self.room_id, self.user, True, [event_id_sf2, event_id_sf3] + ) # SF4 + event_id_c = self.create_and_send_event( + self.room_id, self.user, False, [event_id_sf3] + ) # Add the new extremity and check the latest events are as expected - self.add_extremity(event_id_a) + self.add_extremity(self.room_id, event_id_a) latest_event_ids = self.get_success( self.store.get_latest_event_ids_in_room(self.room_id) diff --git a/tests/storage/test_event_metrics.py b/tests/storage/test_event_metrics.py new file mode 100644
index 0000000000..19f9ccf5e0 --- /dev/null +++ b/tests/storage/test_event_metrics.py
@@ -0,0 +1,82 @@ +# -*- coding: utf-8 -*- +# Copyright 2019 The Matrix.org Foundation C.I.C. +# +# Licensed under the Apache License, Version 2.0 (the 'License'); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an 'AS IS' BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from prometheus_client.exposition import generate_latest + +from synapse.metrics import REGISTRY +from synapse.types import Requester, UserID + +from tests.unittest import HomeserverTestCase + + +class ExtremStatisticsTestCase(HomeserverTestCase): + def test_exposed_to_prometheus(self): + """ + Forward extremity counts are exposed via Prometheus. + """ + room_creator = self.hs.get_room_creation_handler() + + user = UserID("alice", "test") + requester = Requester(user, None, False, None, None) + + # Real events, forward extremities + events = [(3, 2), (6, 2), (4, 6)] + + for event_count, extrems in events: + info = self.get_success(room_creator.create_room(requester, {})) + room_id = info["room_id"] + + last_event = None + + # Make a real event chain + for i in range(event_count): + ev = self.create_and_send_event(room_id, user, False, last_event) + last_event = [ev] + + # Sprinkle in some extremities + for i in range(extrems): + ev = self.create_and_send_event(room_id, user, False, last_event) + + # Let it run for a while, then pull out the statistics from the + # Prometheus client registry + self.reactor.advance(60 * 60 * 1000) + self.pump(1) + + items = set( + filter( + lambda x: b"synapse_forward_extremities_" in x, + generate_latest(REGISTRY).split(b"\n"), + ) + ) + + expected = set([ + b'synapse_forward_extremities_bucket{le="1.0"} 0.0', + b'synapse_forward_extremities_bucket{le="2.0"} 2.0', + b'synapse_forward_extremities_bucket{le="3.0"} 2.0', + b'synapse_forward_extremities_bucket{le="5.0"} 2.0', + b'synapse_forward_extremities_bucket{le="7.0"} 3.0', + b'synapse_forward_extremities_bucket{le="10.0"} 3.0', + b'synapse_forward_extremities_bucket{le="15.0"} 3.0', + b'synapse_forward_extremities_bucket{le="20.0"} 3.0', + b'synapse_forward_extremities_bucket{le="50.0"} 3.0', + b'synapse_forward_extremities_bucket{le="100.0"} 3.0', + b'synapse_forward_extremities_bucket{le="200.0"} 3.0', + b'synapse_forward_extremities_bucket{le="500.0"} 3.0', + b'synapse_forward_extremities_bucket{le="+Inf"} 3.0', + b'synapse_forward_extremities_count 3.0', + b'synapse_forward_extremities_sum 10.0', + ]) + + self.assertEqual(items, expected) diff --git a/tests/unittest.py b/tests/unittest.py
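The expected values above can be derived by hand: each (event_count, extrems) pair builds a chain of events and then sends extrems further events that all share the chain's tip as their prev event, so, assuming nothing later builds on top of them, each room ends with exactly extrems forward extremities: 2, 2 and 6. Prometheus histogram buckets are cumulative, which the following check (a sketch, not part of the test) makes explicit:

    extremities = [2, 2, 6]                  # per-room forward extremity counts
    assert sum(extremities) == 10            # synapse_forward_extremities_sum
    assert len(extremities) == 3             # synapse_forward_extremities_count

    # Cumulative le="N" buckets count every room with at most N extremities:
    assert sum(1 for x in extremities if x <= 1) == 0  # le="1.0"
    assert sum(1 for x in extremities if x <= 2) == 2  # le="2.0" through le="5.0"
    assert sum(1 for x in extremities if x <= 7) == 3  # le="7.0" and everything above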
index 7dbb64af59..b6dc7932ce 100644 --- a/tests/unittest.py +++ b/tests/unittest.py
@@ -27,11 +27,12 @@ import twisted.logger from twisted.internet.defer import Deferred from twisted.trial import unittest +from synapse.api.constants import EventTypes from synapse.config.homeserver import HomeServerConfig from synapse.http.server import JsonResource from synapse.http.site import SynapseRequest from synapse.server import HomeServer -from synapse.types import UserID, create_requester +from synapse.types import Requester, UserID, create_requester from synapse.util.logcontext import LoggingContext from tests.server import get_clock, make_request, render, setup_test_homeserver @@ -442,6 +443,64 @@ class HomeserverTestCase(TestCase): access_token = channel.json_body["access_token"] return access_token + def create_and_send_event( + self, room_id, user, soft_failed=False, prev_event_ids=None + ): + """ + Create and send an event. + + Args: + soft_failed (bool): Whether to create a soft failed event or not + prev_event_ids (list[str]|None): Explicitly set the prev events, + or if None just use the default + + Returns: + str: The new event's ID. + """ + event_creator = self.hs.get_event_creation_handler() + secrets = self.hs.get_secrets() + requester = Requester(user, None, False, None, None) + + prev_events_and_hashes = None + if prev_event_ids: + prev_events_and_hashes = [[p, {}, 0] for p in prev_event_ids] + + event, context = self.get_success( + event_creator.create_event( + requester, + { + "type": EventTypes.Message, + "room_id": room_id, + "sender": user.to_string(), + "content": {"body": secrets.token_hex(), "msgtype": "m.text"}, + }, + prev_events_and_hashes=prev_events_and_hashes, + ) + ) + + if soft_failed: + event.internal_metadata.soft_failed = True + + self.get_success( + event_creator.send_nonmember_event(requester, event, context) + ) + + return event.event_id + + def add_extremity(self, room_id, event_id): + """ + Add the given event as an extremity to the room. + """ + self.get_success( + self.hs.get_datastore()._simple_insert( + table="event_forward_extremities", + values={"room_id": room_id, "event_id": event_id}, + desc="test_add_extremity", + ) + ) + + self.hs.get_datastore().get_latest_event_ids_in_room.invalidate((room_id,)) + def attempt_wrong_password_login(self, username, password): """Attempts to login as the user with the given password, asserting that the attempt *fails*. diff --git a/tox.ini b/tox.ini
index 543b232ae7..0c4d562766 100644 --- a/tox.ini +++ b/tox.ini
@@ -1,5 +1,5 @@ [tox] -envlist = packaging, py27, py36, pep8, check_isort +envlist = packaging, py35, py36, py37, pep8, check_isort [base] deps = @@ -79,7 +79,7 @@ usedevelop=true # A test suite for the oldest supported versions of Python libraries, to catch # any uses of APIs not available in them. -[testenv:py27-old] +[testenv:py35-old] skip_install=True deps = # Old automat version for Twisted