author     Erik Johnston <erik@matrix.org>    2022-12-09 09:53:29 +0000
committer  Erik Johnston <erik@matrix.org>    2022-12-09 09:53:29 +0000
commit     90d007ec574d759ffa37d659db4f3d9bbea9589f (patch)
tree       ac43c259d826919c1d8148461bc2dfaf93431143
parent     Merge branch 'develop' into matrix-org-hotfixes (diff)
parent     Delete stale non-e2e devices for users, take 2 (#14595) (diff)
download   synapse-90d007ec574d759ffa37d659db4f3d9bbea9589f.tar.xz
Merge remote-tracking branch 'origin/develop' into matrix-org-hotfixes
-rw-r--r--  .github/workflows/tests.yml                                          |  10
-rw-r--r--  changelog.d/14548.misc                                               |   1
-rw-r--r--  changelog.d/14595.misc                                               |   1
-rw-r--r--  changelog.d/14610.misc                                               |   1
-rw-r--r--  changelog.d/14621.bugfix                                             |   1
-rw-r--r--  changelog.d/14625.bugfix                                             |   1
-rw-r--r--  changelog.d/14632.bugfix                                             |   1
-rw-r--r--  changelog.d/14636.misc                                               |   1
-rw-r--r--  changelog.d/14637.bugfix                                             |   1
-rw-r--r--  changelog.d/14639.bugfix                                             |   1
-rw-r--r--  changelog.d/14643.bugfix                                             |   1
-rw-r--r--  changelog.d/14645.misc                                               |   1
-rw-r--r--  docker/Dockerfile                                                    |   2
-rw-r--r--  docker/Dockerfile-workers                                            |   3
-rw-r--r--  docker/complement/Dockerfile                                         |   3
-rw-r--r--  docker/editable.Dockerfile                                           |  75
-rw-r--r--  docs/usage/administration/admin_faq.md                               |   2
-rw-r--r--  poetry.lock                                                          |   8
-rwxr-xr-x  scripts-dev/complement.sh                                            | 100
-rw-r--r--  synapse/_scripts/register_new_matrix_user.py                         |   5
-rw-r--r--  synapse/api/errors.py                                                |   6
-rw-r--r--  synapse/handlers/device.py                                           |  31
-rw-r--r--  synapse/http/server.py                                               |  19
-rw-r--r--  synapse/res/templates/_base.html                                     |   6
-rw-r--r--  synapse/res/templates/notice_expiry.html                             |   6
-rw-r--r--  synapse/res/templates/notif_mail.html                                |   6
-rw-r--r--  synapse/rest/client/receipts.py                                      |   5
-rw-r--r--  synapse/rest/media/v1/media_repository.py                            |   4
-rw-r--r--  synapse/storage/databases/main/devices.py                            |  79
-rw-r--r--  synapse/storage/schema/main/delta/73/22_rebuild_user_dir_stats.sql   |  29
-rw-r--r--  synapse/util/caches/stream_change_cache.py                           |   9
-rw-r--r--  synapse/util/httpresourcetree.py                                     |   6
-rw-r--r--  tests/handlers/test_device.py                                        |   2
-rw-r--r--  tests/rest/admin/test_user.py                                        |   2
-rw-r--r--  tests/rest/client/test_login_token_request.py                        |   4
-rw-r--r--  tests/rest/client/test_receipts.py                                   |  76
-rw-r--r--  tests/rest/client/test_rendezvous.py                                 |   2
-rw-r--r--  tests/storage/test_client_ips.py                                     |   4
-rw-r--r--  tests/test_server.py                                                 |   2
-rw-r--r--  tests/util/test_stream_change_cache.py                               |   7
40 files changed, 458 insertions, 66 deletions
diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml

index 4cb2459b37..f07655d982 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml
@@ -197,8 +197,12 @@ jobs: - run: sudo apt-get -qq install xmlsec1 - name: Set up PostgreSQL ${{ matrix.job.postgres-version }} if: ${{ matrix.job.postgres-version }} + # 1. Mount postgres data files onto a tmpfs in-memory filesystem to reduce overhead of docker's overlayfs layer. + # 2. Expose the unix socket for postgres. This removes latency of using docker-proxy for connections. run: | docker run -d -p 5432:5432 \ + --tmpfs /var/lib/postgres:rw,size=6144m \ + --mount 'type=bind,src=/var/run/postgresql,dst=/var/run/postgresql' \ -e POSTGRES_PASSWORD=postgres \ -e POSTGRES_INITDB_ARGS="--lc-collate C --lc-ctype C --encoding UTF8" \ postgres:${{ matrix.job.postgres-version }} @@ -220,10 +224,10 @@ jobs: if: ${{ matrix.job.postgres-version }} timeout-minutes: 2 run: until pg_isready -h localhost; do sleep 1; done - - run: poetry run trial --jobs=2 tests + - run: poetry run trial --jobs=6 tests env: SYNAPSE_POSTGRES: ${{ matrix.job.database == 'postgres' || '' }} - SYNAPSE_POSTGRES_HOST: localhost + SYNAPSE_POSTGRES_HOST: /var/run/postgresql SYNAPSE_POSTGRES_USER: postgres SYNAPSE_POSTGRES_PASSWORD: postgres - name: Dump logs @@ -292,7 +296,7 @@ jobs: python-version: '3.7' extras: "all test" - - run: poetry run trial -j2 tests + - run: poetry run trial -j6 tests - name: Dump logs # Logs are most useful when the command fails, always include them. if: ${{ always() }} diff --git a/changelog.d/14548.misc b/changelog.d/14548.misc new file mode 100644
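A note on the SYNAPSE_POSTGRES_HOST change above: libpq (and therefore psycopg2) treats a host value that begins with a slash as a unix-socket directory rather than a hostname, which is how pointing the tests at /var/run/postgresql bypasses docker-proxy. A minimal sketch of the resulting connection, with illustrative credentials matching the workflow above:

import psycopg2

# A leading "/" in `host` selects a socket directory instead of a TCP hostname.
conn = psycopg2.connect(
    host="/var/run/postgresql",
    user="postgres",
    password="postgres",
    dbname="postgres",
)
print(conn.get_dsn_parameters()["host"])
conn.close()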
index 0000000000..416332015c --- /dev/null +++ b/changelog.d/14548.misc
@@ -0,0 +1 @@ +Add `--editable` flag to `complement.sh` which uses an editable install of Synapse for faster turn-around times whilst developing iteratively. \ No newline at end of file diff --git a/changelog.d/14595.misc b/changelog.d/14595.misc new file mode 100644
index 0000000000..f9bfc581ad --- /dev/null +++ b/changelog.d/14595.misc
@@ -0,0 +1 @@ +Prune user's old devices on login if they have too many. diff --git a/changelog.d/14610.misc b/changelog.d/14610.misc new file mode 100644
index 0000000000..097bf41aca --- /dev/null +++ b/changelog.d/14610.misc
@@ -0,0 +1 @@ +Alter some unit test environment parameters to decrease time spent running tests. diff --git a/changelog.d/14621.bugfix b/changelog.d/14621.bugfix new file mode 100644
index 0000000000..cb95a87d92 --- /dev/null +++ b/changelog.d/14621.bugfix
@@ -0,0 +1 @@ +Return spec-compliant JSON errors when unknown endpoints are requested. diff --git a/changelog.d/14625.bugfix b/changelog.d/14625.bugfix new file mode 100644
index 0000000000..a4d1216690 --- /dev/null +++ b/changelog.d/14625.bugfix
@@ -0,0 +1 @@ +Fix html templates to load images only on HTTPS. Contributed by @ashfame. diff --git a/changelog.d/14632.bugfix b/changelog.d/14632.bugfix new file mode 100644
index 0000000000..323d10f1b0 --- /dev/null +++ b/changelog.d/14632.bugfix
@@ -0,0 +1 @@ +Reject invalid read receipt requests with empty room or event IDs. Contributed by Nick @ Beeper (@fizzadar). diff --git a/changelog.d/14636.misc b/changelog.d/14636.misc new file mode 100644
index 0000000000..9d24f6888f --- /dev/null +++ b/changelog.d/14636.misc
@@ -0,0 +1 @@ +Remove useless cargo install with apt from Dockerfile. diff --git a/changelog.d/14637.bugfix b/changelog.d/14637.bugfix new file mode 100644
index 0000000000..ab6db383c6 --- /dev/null +++ b/changelog.d/14637.bugfix
@@ -0,0 +1 @@ +Fix a bug introduced in v1.67.0 where not specifying a config file or a server URL would lead to the `register_new_matrix_user` script failing. \ No newline at end of file diff --git a/changelog.d/14639.bugfix b/changelog.d/14639.bugfix new file mode 100644
index 0000000000..8730b10afe --- /dev/null +++ b/changelog.d/14639.bugfix
@@ -0,0 +1 @@ +Fix a long-standing bug where the user directory and room/user stats might be out of sync. diff --git a/changelog.d/14643.bugfix b/changelog.d/14643.bugfix new file mode 100644
index 0000000000..8730b10afe --- /dev/null +++ b/changelog.d/14643.bugfix
@@ -0,0 +1 @@ +Fix a long-standing bug where the user directory and room/user stats might be out of sync. diff --git a/changelog.d/14645.misc b/changelog.d/14645.misc new file mode 100644
index 0000000000..012a57a40e --- /dev/null +++ b/changelog.d/14645.misc
@@ -0,0 +1 @@ +Bump certifi from 2021.10.8 to 2022.12.7. diff --git a/docker/Dockerfile b/docker/Dockerfile
index 7f8756e8a4..185d5bc3d4 100644 --- a/docker/Dockerfile +++ b/docker/Dockerfile
@@ -43,7 +43,7 @@ RUN \ --mount=type=cache,target=/var/cache/apt,sharing=locked \ --mount=type=cache,target=/var/lib/apt,sharing=locked \ apt-get update -qq && apt-get install -yqq \ - build-essential cargo git libffi-dev libssl-dev \ + build-essential git libffi-dev libssl-dev \ && rm -rf /var/lib/apt/lists/* # We install poetry in its own build stage to avoid its dependencies conflicting with diff --git a/docker/Dockerfile-workers b/docker/Dockerfile-workers
index 0c2d4f3047..faf7f2cef8 100644 --- a/docker/Dockerfile-workers +++ b/docker/Dockerfile-workers
@@ -1,6 +1,7 @@ # syntax=docker/dockerfile:1 ARG SYNAPSE_VERSION=latest +ARG FROM=matrixdotorg/synapse:$SYNAPSE_VERSION # first of all, we create a base image with an nginx which we can copy into the # target image. For repeated rebuilds, this is much faster than apt installing @@ -23,7 +24,7 @@ FROM debian:bullseye-slim AS deps_base FROM redis:6-bullseye AS redis_base # now build the final image, based on the the regular Synapse docker image -FROM matrixdotorg/synapse:$SYNAPSE_VERSION +FROM $FROM # Install supervisord with pip instead of apt, to avoid installing a second # copy of python. diff --git a/docker/complement/Dockerfile b/docker/complement/Dockerfile
index c0935c99a8..be1aa1c55e 100644 --- a/docker/complement/Dockerfile +++ b/docker/complement/Dockerfile
@@ -7,8 +7,9 @@ # https://github.com/matrix-org/synapse/blob/develop/docker/README-testing.md#testing-with-postgresql-and-single-or-multi-process-synapse ARG SYNAPSE_VERSION=latest +ARG FROM=matrixdotorg/synapse-workers:$SYNAPSE_VERSION -FROM matrixdotorg/synapse-workers:$SYNAPSE_VERSION +FROM $FROM # First of all, we copy postgres server from the official postgres image, # since for repeated rebuilds, this is much faster than apt installing # postgres each time. diff --git a/docker/editable.Dockerfile b/docker/editable.Dockerfile new file mode 100644
index 0000000000..0e8cf2e712 --- /dev/null +++ b/docker/editable.Dockerfile
@@ -0,0 +1,75 @@ +# syntax=docker/dockerfile:1 +# This dockerfile builds an editable install of Synapse. +# +# Used by `complement.sh`. Not suitable for production use. + +ARG PYTHON_VERSION=3.9 + +### +### Stage 0: generate requirements.txt +### +# We hardcode the use of Debian bullseye here because this could change upstream +# and other Dockerfiles used for testing are expecting bullseye. +FROM docker.io/python:${PYTHON_VERSION}-slim-bullseye + +# Install Rust and other dependencies (stolen from normal Dockerfile) +# install the OS build deps +RUN \ + --mount=type=cache,target=/var/cache/apt,sharing=locked \ + --mount=type=cache,target=/var/lib/apt,sharing=locked \ + apt-get update -qq && apt-get install -yqq \ + build-essential \ + libffi-dev \ + libjpeg-dev \ + libpq-dev \ + libssl-dev \ + libwebp-dev \ + libxml++2.6-dev \ + libxslt1-dev \ + openssl \ + zlib1g-dev \ + git \ + curl \ + gosu \ + libjpeg62-turbo \ + libpq5 \ + libwebp6 \ + xmlsec1 \ + libjemalloc2 \ + && rm -rf /var/lib/apt/lists/* +ENV RUSTUP_HOME=/rust +ENV CARGO_HOME=/cargo +ENV PATH=/cargo/bin:/rust/bin:$PATH +RUN mkdir /rust /cargo +RUN curl -sSf https://sh.rustup.rs | sh -s -- -y --no-modify-path --default-toolchain stable --profile minimal + + +# Make a base copy of the editable source tree, so that we have something to +# install and build now — even though it's going to be covered up by a mount +# at runtime. +COPY synapse /editable-src/synapse/ +COPY rust /editable-src/rust/ +# ... and what we need to `pip install`. +COPY pyproject.toml poetry.lock README.rst build_rust.py Cargo.toml Cargo.lock /editable-src/ + +RUN pip install poetry +RUN poetry config virtualenvs.create false +RUN cd /editable-src && poetry install --extras all + +# Make copies of useful things for inspection: +# - the Rust module (must be copied to the editable source tree before startup) +# - poetry.lock is useful for checking if dependencies have changed. +RUN cp /editable-src/synapse/synapse_rust.abi3.so /synapse_rust.abi3.so.bak +RUN cp /editable-src/poetry.lock /poetry.lock.bak + + +### Extra setup from original Dockerfile +COPY ./docker/start.py /start.py +COPY ./docker/conf /conf + +EXPOSE 8008/tcp 8009/tcp 8448/tcp + +ENTRYPOINT ["/start.py"] + +HEALTHCHECK --start-period=5s --interval=15s --timeout=5s \ + CMD curl -fSs http://localhost:8008/health || exit 1 diff --git a/docs/usage/administration/admin_faq.md b/docs/usage/administration/admin_faq.md
index 7ba5a83f04..0bfb732464 100644 --- a/docs/usage/administration/admin_faq.md +++ b/docs/usage/administration/admin_faq.md
@@ -79,7 +79,7 @@ Here we can see that the request has been tagged with `GET-37`. (The tag depends grep 'GET-37' homeserver.log ``` -If you want to paste that output into a github issue or matrix room, please remember to surround it with triple-backticks (```) to make it legible (see https://help.github.com/en/articles/basic-writing-and-formatting-syntax#quoting-code). +If you want to paste that output into a github issue or matrix room, please remember to surround it with triple-backticks (```) to make it legible (see [quoting code](https://help.github.com/en/articles/basic-writing-and-formatting-syntax#quoting-code)). What do all those fields in the 'Processed' line mean? diff --git a/poetry.lock b/poetry.lock
index 90b363a548..1c10f0458a 100644 --- a/poetry.lock +++ b/poetry.lock
@@ -106,11 +106,11 @@ frozendict = ["frozendict (>=1.0)"] [[package]] name = "certifi" -version = "2021.10.8" +version = "2022.12.7" description = "Python package for providing Mozilla's CA Bundle." category = "main" optional = false -python-versions = "*" +python-versions = ">=3.6" [[package]] name = "cffi" @@ -1709,8 +1709,8 @@ canonicaljson = [ {file = "canonicaljson-1.6.4.tar.gz", hash = "sha256:6c09b2119511f30eb1126cfcd973a10824e20f1cfd25039cde3d1218dd9c8d8f"}, ] certifi = [ - {file = "certifi-2021.10.8-py2.py3-none-any.whl", hash = "sha256:d62a0163eb4c2344ac042ab2bdf75399a71a2d8c7d47eac2e2ee91b9d6339569"}, - {file = "certifi-2021.10.8.tar.gz", hash = "sha256:78884e7c1d4b00ce3cea67b44566851c4343c120abd683433ce934a68ea58872"}, + {file = "certifi-2022.12.7-py3-none-any.whl", hash = "sha256:4ad3232f5e926d6718ec31cfc1fcadfde020920e278684144551c91769c7bc18"}, + {file = "certifi-2022.12.7.tar.gz", hash = "sha256:35824b4c3a97115964b408844d64aa14db1cc518f6562e8d7261699d1350a9e3"}, ] cffi = [ {file = "cffi-1.15.0-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:c2502a1a03b6312837279c8c1bd3ebedf6c12c4228ddbad40912d671ccc8a962"}, diff --git a/scripts-dev/complement.sh b/scripts-dev/complement.sh
index 7744b47097..8741ba3e34 100755 --- a/scripts-dev/complement.sh +++ b/scripts-dev/complement.sh
@@ -53,6 +53,12 @@ Run the complement test suite on Synapse. Only build the Docker images. Don't actually run Complement. Conflicts with -f/--fast. + -e, --editable + Use an editable build of Synapse, rebuilding the image if necessary. + This is suitable for use in development where a fast turn-around time + is important. + Not suitable for use in CI in case the editable environment is impure. + For help on arguments to 'go test', run 'go help testflag'. EOF } @@ -73,6 +79,9 @@ while [ $# -ge 1 ]; do "--build-only") skip_complement_run=1 ;; + "-e"|"--editable") + use_editable_synapse=1 + ;; *) # unknown arg: presumably an argument to gotest. break the loop. break @@ -96,25 +105,76 @@ if [[ -z "$COMPLEMENT_DIR" ]]; then echo "Checkout available at 'complement-${COMPLEMENT_REF}'" fi +if [ -n "$use_editable_synapse" ]; then + if [[ -e synapse/synapse_rust.abi3.so ]]; then + # In an editable install, back up the host's compiled Rust module to prevent + # inconvenience; the container will overwrite the module with its own copy. + mv -n synapse/synapse_rust.abi3.so synapse/synapse_rust.abi3.so~host + # And restore it on exit: + synapse_pkg=`realpath synapse` + trap "mv -f '$synapse_pkg/synapse_rust.abi3.so~host' '$synapse_pkg/synapse_rust.abi3.so'" EXIT + fi + + editable_mount="$(realpath .):/editable-src:z" + if docker inspect complement-synapse-editable &>/dev/null; then + # complement-synapse-editable already exists: see if we can still use it: + # - The Rust module must still be importable; it will fail to import if the Rust source has changed. + # - The Poetry lock file must be the same (otherwise we assume dependencies have changed) + + # First set up the module in the right place for an editable installation. + docker run --rm -v $editable_mount --entrypoint 'cp' complement-synapse-editable -- /synapse_rust.abi3.so.bak /editable-src/synapse/synapse_rust.abi3.so + + if (docker run --rm -v $editable_mount --entrypoint 'python' complement-synapse-editable -c 'import synapse.synapse_rust' \ + && docker run --rm -v $editable_mount --entrypoint 'diff' complement-synapse-editable --brief /editable-src/poetry.lock /poetry.lock.bak); then + skip_docker_build=1 + else + echo "Editable Synapse image is stale. Will rebuild." + unset skip_docker_build + fi + fi +fi + if [ -z "$skip_docker_build" ]; then - # Build the base Synapse image from the local checkout - echo_if_github "::group::Build Docker image: matrixdotorg/synapse" - docker build -t matrixdotorg/synapse \ - --build-arg TEST_ONLY_SKIP_DEP_HASH_VERIFICATION \ - --build-arg TEST_ONLY_IGNORE_POETRY_LOCKFILE \ - -f "docker/Dockerfile" . - echo_if_github "::endgroup::" - - # Build the workers docker image (from the base Synapse image we just built). - echo_if_github "::group::Build Docker image: matrixdotorg/synapse-workers" - docker build -t matrixdotorg/synapse-workers -f "docker/Dockerfile-workers" . - echo_if_github "::endgroup::" - - # Build the unified Complement image (from the worker Synapse image we just built). - echo_if_github "::group::Build Docker image: complement/Dockerfile" - docker build -t complement-synapse \ - -f "docker/complement/Dockerfile" "docker/complement" - echo_if_github "::endgroup::" + if [ -n "$use_editable_synapse" ]; then + + # Build a special image designed for use in development with editable + # installs. + docker build -t synapse-editable \ + -f "docker/editable.Dockerfile" . + + docker build -t synapse-workers-editable \ + --build-arg FROM=synapse-editable \ + -f "docker/Dockerfile-workers" . 
+ + docker build -t complement-synapse-editable \ + --build-arg FROM=synapse-workers-editable \ + -f "docker/complement/Dockerfile" "docker/complement" + + # Prepare the Rust module + docker run --rm -v $editable_mount --entrypoint 'cp' complement-synapse-editable -- /synapse_rust.abi3.so.bak /editable-src/synapse/synapse_rust.abi3.so + + else + + # Build the base Synapse image from the local checkout + echo_if_github "::group::Build Docker image: matrixdotorg/synapse" + docker build -t matrixdotorg/synapse \ + --build-arg TEST_ONLY_SKIP_DEP_HASH_VERIFICATION \ + --build-arg TEST_ONLY_IGNORE_POETRY_LOCKFILE \ + -f "docker/Dockerfile" . + echo_if_github "::endgroup::" + + # Build the workers docker image (from the base Synapse image we just built). + echo_if_github "::group::Build Docker image: matrixdotorg/synapse-workers" + docker build -t matrixdotorg/synapse-workers -f "docker/Dockerfile-workers" . + echo_if_github "::endgroup::" + + # Build the unified Complement image (from the worker Synapse image we just built). + echo_if_github "::group::Build Docker image: complement/Dockerfile" + docker build -t complement-synapse \ + -f "docker/complement/Dockerfile" "docker/complement" + echo_if_github "::endgroup::" + + fi fi if [ -n "$skip_complement_run" ]; then @@ -123,6 +183,10 @@ if [ -n "$skip_complement_run" ]; then fi export COMPLEMENT_BASE_IMAGE=complement-synapse +if [ -n "$use_editable_synapse" ]; then + export COMPLEMENT_BASE_IMAGE=complement-synapse-editable + export COMPLEMENT_HOST_MOUNTS="$editable_mount" +fi extra_test_args=() diff --git a/synapse/_scripts/register_new_matrix_user.py b/synapse/_scripts/register_new_matrix_user.py
index 0c4504d5d8..2b74a40166 100644 --- a/synapse/_scripts/register_new_matrix_user.py +++ b/synapse/_scripts/register_new_matrix_user.py
@@ -222,6 +222,7 @@ def main() -> None: args = parser.parse_args() + config: Optional[Dict[str, Any]] = None if "config" in args and args.config: config = yaml.safe_load(args.config) @@ -229,7 +230,7 @@ def main() -> None: secret = args.shared_secret else: # argparse should check that we have either config or shared secret - assert config + assert config is not None secret = config.get("registration_shared_secret") secret_file = config.get("registration_shared_secret_path") @@ -244,7 +245,7 @@ def main() -> None: if args.server_url: server_url = args.server_url - elif config: + elif config is not None: server_url = _find_client_listener(config) if not server_url: server_url = _DEFAULT_SERVER_URL diff --git a/synapse/api/errors.py b/synapse/api/errors.py
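The register_new_matrix_user fix above addresses the fact that `config` was previously bound only inside the `if "config" in args` branch, so invoking the script with neither a config file nor a server URL could hit an unbound local name. A self-contained sketch of the before/after shape (hypothetical values, not the script's real argument handling):

from typing import Any, Dict, Optional

def pick_secret_before_fix(have_config: bool, shared_secret: Optional[str]) -> str:
    if have_config:
        config = {"registration_shared_secret": "example-secret"}
    if shared_secret:
        return shared_secret
    # UnboundLocalError here when no config file was supplied.
    return config["registration_shared_secret"]

def pick_secret_after_fix(have_config: bool, shared_secret: Optional[str]) -> str:
    config: Optional[Dict[str, Any]] = None  # always bound, as in the patch
    if have_config:
        config = {"registration_shared_secret": "example-secret"}
    if shared_secret:
        return shared_secret
    assert config is not None  # argparse should have required one of the two
    return config["registration_shared_secret"]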
index e2cfcea0f2..76ef12ed3a 100644 --- a/synapse/api/errors.py +++ b/synapse/api/errors.py
@@ -300,10 +300,8 @@ class InteractiveAuthIncompleteError(Exception): class UnrecognizedRequestError(SynapseError): """An error indicating we don't understand the request you're trying to make""" - def __init__( - self, msg: str = "Unrecognized request", errcode: str = Codes.UNRECOGNIZED - ): - super().__init__(400, msg, errcode) + def __init__(self, msg: str = "Unrecognized request", code: int = 400): + super().__init__(code, msg, Codes.UNRECOGNIZED) class NotFoundError(SynapseError): diff --git a/synapse/handlers/device.py b/synapse/handlers/device.py
index d4750a32e6..7674c187ef 100644 --- a/synapse/handlers/device.py +++ b/synapse/handlers/device.py
@@ -52,6 +52,7 @@ from synapse.util import stringutils from synapse.util.async_helpers import Linearizer from synapse.util.caches.expiringcache import ExpiringCache from synapse.util.cancellation import cancellable +from synapse.util.iterutils import batch_iter from synapse.util.metrics import measure_func from synapse.util.retryutils import NotRetryingDestination @@ -421,6 +422,9 @@ class DeviceHandler(DeviceWorkerHandler): self._check_device_name_length(initial_device_display_name) + # Prune the user's device list if they already have a lot of devices. + await self._prune_too_many_devices(user_id) + if device_id is not None: new_device = await self.store.store_device( user_id=user_id, @@ -452,6 +456,31 @@ class DeviceHandler(DeviceWorkerHandler): raise errors.StoreError(500, "Couldn't generate a device ID.") + async def _prune_too_many_devices(self, user_id: str) -> None: + """Delete any excess old devices this user may have.""" + device_ids = await self.store.check_too_many_devices_for_user(user_id) + if not device_ids: + return + + # We don't want to block and try and delete tonnes of devices at once, + # so we cap the number of devices we delete synchronously. + first_batch, remaining_device_ids = device_ids[:10], device_ids[10:] + await self.delete_devices(user_id, first_batch) + + if not remaining_device_ids: + return + + # Now spawn a background loop that deletes the rest. + async def _prune_too_many_devices_loop() -> None: + for batch in batch_iter(remaining_device_ids, 10): + await self.delete_devices(user_id, batch) + + await self.clock.sleep(1) + + run_as_background_process( + "_prune_too_many_devices_loop", _prune_too_many_devices_loop + ) + async def _delete_stale_devices(self) -> None: """Background task that deletes devices which haven't been accessed for more than a configured time period. @@ -481,7 +510,7 @@ class DeviceHandler(DeviceWorkerHandler): device_ids = [d for d in device_ids if d != except_device_id] await self.delete_devices(user_id, device_ids) - async def delete_devices(self, user_id: str, device_ids: List[str]) -> None: + async def delete_devices(self, user_id: str, device_ids: Collection[str]) -> None: """Delete several devices Args: diff --git a/synapse/http/server.py b/synapse/http/server.py
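The pruning loop above leans on synapse.util.iterutils.batch_iter to delete the remaining devices in chunks of ten, sleeping between chunks so a single login cannot monopolise the database. A self-contained sketch of that batching pattern (plain Python, mirroring what the handler relies on rather than importing Synapse):

from itertools import islice
from typing import Iterable, Iterator, Tuple, TypeVar

T = TypeVar("T")

def batch_iter(iterable: Iterable[T], size: int) -> Iterator[Tuple[T, ...]]:
    # Yield tuples of at most `size` items, like synapse.util.iterutils.batch_iter.
    it = iter(iterable)
    while batch := tuple(islice(it, size)):
        yield batch

remaining_device_ids = [f"DEVICE_{i}" for i in range(23)]
for batch in batch_iter(remaining_device_ids, 10):
    print(f"would delete {len(batch)} devices in this pass")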
index 051a1899a0..2563858f3c 100644 --- a/synapse/http/server.py +++ b/synapse/http/server.py
@@ -577,7 +577,24 @@ def _unrecognised_request_handler(request: Request) -> NoReturn: Args: request: Unused, but passed in to match the signature of ServletCallback. """ - raise UnrecognizedRequestError() + raise UnrecognizedRequestError(code=404) + + +class UnrecognizedRequestResource(resource.Resource): + """ + Similar to twisted.web.resource.NoResource, but returns a JSON 404 with an + errcode of M_UNRECOGNIZED. + """ + + def render(self, request: SynapseRequest) -> int: + f = failure.Failure(UnrecognizedRequestError(code=404)) + return_json_error(f, request, None) + # A response has already been sent but Twisted requires either NOT_DONE_YET + # or the response bytes as a return value. + return NOT_DONE_YET + + def getChild(self, name: str, request: Request) -> resource.Resource: + return self class RootRedirect(resource.Resource): diff --git a/synapse/res/templates/_base.html b/synapse/res/templates/_base.html
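With UnrecognizedRequestResource in place, unknown paths now produce a spec-compliant JSON 404 with errcode M_UNRECOGNIZED (previously some surfaces returned a 400, others Twisted's HTML NoResource page); the updated expectations in tests/test_server.py below reflect this. A sketch of what a client observes (hypothetical homeserver URL, assuming the requests library):

import requests

resp = requests.get("https://synapse.example.com/_matrix/foobar")
assert resp.status_code == 404
body = resp.json()
assert body["errcode"] == "M_UNRECOGNIZED"
assert body["error"] == "Unrecognized request"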
index 46439fce6a..4b5cc7bcb6 100644 --- a/synapse/res/templates/_base.html +++ b/synapse/res/templates/_base.html
@@ -13,13 +13,13 @@ <body> <header class="mx_Header"> {% if app_name == "Riot" %} - <img src="http://riot.im/img/external/riot-logo-email.png" width="83" height="83" alt="[Riot]"/> + <img src="https://riot.im/img/external/riot-logo-email.png" width="83" height="83" alt="[Riot]"/> {% elif app_name == "Vector" %} - <img src="http://matrix.org/img/vector-logo-email.png" width="64" height="83" alt="[Vector]"/> + <img src="https://matrix.org/img/vector-logo-email.png" width="64" height="83" alt="[Vector]"/> {% elif app_name == "Element" %} <img src="https://static.element.io/images/email-logo.png" width="83" height="83" alt="[Element]"/> {% else %} - <img src="http://matrix.org/img/matrix-120x51.png" width="120" height="51" alt="[matrix]"/> + <img src="https://matrix.org/img/matrix-120x51.png" width="120" height="51" alt="[matrix]"/> {% endif %} </header> diff --git a/synapse/res/templates/notice_expiry.html b/synapse/res/templates/notice_expiry.html
index 406397aaca..f62038e111 100644 --- a/synapse/res/templates/notice_expiry.html +++ b/synapse/res/templates/notice_expiry.html
@@ -21,13 +21,13 @@ </td> <td class="logo"> {% if app_name == "Riot" %} - <img src="http://riot.im/img/external/riot-logo-email.png" width="83" height="83" alt="[Riot]"/> + <img src="https://riot.im/img/external/riot-logo-email.png" width="83" height="83" alt="[Riot]"/> {% elif app_name == "Vector" %} - <img src="http://matrix.org/img/vector-logo-email.png" width="64" height="83" alt="[Vector]"/> + <img src="https://matrix.org/img/vector-logo-email.png" width="64" height="83" alt="[Vector]"/> {% elif app_name == "Element" %} <img src="https://static.element.io/images/email-logo.png" width="83" height="83" alt="[Element]"/> {% else %} - <img src="http://matrix.org/img/matrix-120x51.png" width="120" height="51" alt="[matrix]"/> + <img src="https://matrix.org/img/matrix-120x51.png" width="120" height="51" alt="[matrix]"/> {% endif %} </td> </tr> diff --git a/synapse/res/templates/notif_mail.html b/synapse/res/templates/notif_mail.html
index 2add9dd859..7da0fff5e9 100644 --- a/synapse/res/templates/notif_mail.html +++ b/synapse/res/templates/notif_mail.html
@@ -22,13 +22,13 @@ </td> <td class="logo"> {%- if app_name == "Riot" %} - <img src="http://riot.im/img/external/riot-logo-email.png" width="83" height="83" alt="[Riot]"/> + <img src="https://riot.im/img/external/riot-logo-email.png" width="83" height="83" alt="[Riot]"/> {%- elif app_name == "Vector" %} - <img src="http://matrix.org/img/vector-logo-email.png" width="64" height="83" alt="[Vector]"/> + <img src="https://matrix.org/img/vector-logo-email.png" width="64" height="83" alt="[Vector]"/> {%- elif app_name == "Element" %} <img src="https://static.element.io/images/email-logo.png" width="83" height="83" alt="[Element]"/> {%- else %} - <img src="http://matrix.org/img/matrix-120x51.png" width="120" height="51" alt="[matrix]"/> + <img src="https://matrix.org/img/matrix-120x51.png" width="120" height="51" alt="[matrix]"/> {%- endif %} </td> </tr> diff --git a/synapse/rest/client/receipts.py b/synapse/rest/client/receipts.py
index 18a282b22c..28b7d30ea8 100644 --- a/synapse/rest/client/receipts.py +++ b/synapse/rest/client/receipts.py
@@ -20,7 +20,7 @@ from synapse.api.errors import Codes, SynapseError from synapse.http.server import HttpServer from synapse.http.servlet import RestServlet, parse_json_object_from_request from synapse.http.site import SynapseRequest -from synapse.types import JsonDict +from synapse.types import EventID, JsonDict, RoomID from ._base import client_patterns @@ -56,6 +56,9 @@ class ReceiptRestServlet(RestServlet): ) -> Tuple[int, JsonDict]: requester = await self.auth.get_user_by_req(request) + if not RoomID.is_valid(room_id) or not event_id.startswith(EventID.SIGIL): + raise SynapseError(400, "A valid room ID and event ID must be specified") + if receipt_type not in self._known_receipt_types: raise SynapseError( 400, diff --git a/synapse/rest/media/v1/media_repository.py b/synapse/rest/media/v1/media_repository.py
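The receipt validation above only checks that the room ID parses as a valid RoomID and that the event ID carries the `$` sigil; anything else is rejected with a 400, as exercised by the new tests/rest/client/test_receipts.py below. A quick illustration with the same helpers (assumes a Synapse checkout is importable):

from synapse.types import EventID, RoomID

assert RoomID.is_valid("!abc:beep")
assert not RoomID.is_valid("not-a-room-id")
assert "$def".startswith(EventID.SIGIL)           # EventID.SIGIL is "$"
assert not "not-an-event-id".startswith(EventID.SIGIL)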
index 40b0d39eb2..c70e1837af 100644 --- a/synapse/rest/media/v1/media_repository.py +++ b/synapse/rest/media/v1/media_repository.py
@@ -24,7 +24,6 @@ from matrix_common.types.mxc_uri import MXCUri import twisted.internet.error import twisted.web.http from twisted.internet.defer import Deferred -from twisted.web.resource import Resource from synapse.api.errors import ( FederationDeniedError, @@ -35,6 +34,7 @@ from synapse.api.errors import ( ) from synapse.config._base import ConfigError from synapse.config.repository import ThumbnailRequirement +from synapse.http.server import UnrecognizedRequestResource from synapse.http.site import SynapseRequest from synapse.logging.context import defer_to_thread from synapse.metrics.background_process_metrics import run_as_background_process @@ -1046,7 +1046,7 @@ class MediaRepository: return removed_media, len(removed_media) -class MediaRepositoryResource(Resource): +class MediaRepositoryResource(UnrecognizedRequestResource): """File uploading and downloading. Uploads are POSTed to a resource which returns a token which is used to GET diff --git a/synapse/storage/databases/main/devices.py b/synapse/storage/databases/main/devices.py
index a5bb4d404e..08ccd46a2b 100644 --- a/synapse/storage/databases/main/devices.py +++ b/synapse/storage/databases/main/devices.py
@@ -1569,6 +1569,72 @@ class DeviceBackgroundUpdateStore(SQLBaseStore): return rows + async def check_too_many_devices_for_user(self, user_id: str) -> List[str]: + """Check if the user has a lot of devices, and if so return the set of + devices we can prune. + + This does *not* return hidden devices or devices with E2E keys. + """ + + num_devices = await self.db_pool.simple_select_one_onecol( + table="devices", + keyvalues={"user_id": user_id, "hidden": False}, + retcol="COALESCE(COUNT(*), 0)", + desc="count_devices", + ) + + # We let users have up to ten devices without pruning. + if num_devices <= 10: + return [] + + # We prune everything older than N days. + max_last_seen = self._clock.time_msec() - 14 * 24 * 60 * 60 * 1000 + + if num_devices > 50: + # If the user has more than 50 devices, then we chose a last seen + # that ensures we keep at most 50 devices. + sql = """ + SELECT last_seen FROM devices + LEFT JOIN e2e_device_keys_json USING (user_id, device_id) + WHERE + user_id = ? + AND NOT hidden + AND last_seen IS NOT NULL + AND key_json IS NULL + ORDER BY last_seen DESC + LIMIT 1 + OFFSET 50 + """ + + rows = await self.db_pool.execute( + "check_too_many_devices_for_user_last_seen", None, sql, (user_id,) + ) + if rows: + max_last_seen = max(rows[0][0], max_last_seen) + + # Now fetch the devices to delete. + sql = """ + SELECT DISTINCT device_id FROM devices + LEFT JOIN e2e_device_keys_json USING (user_id, device_id) + WHERE + user_id = ? + AND NOT hidden + AND last_seen < ? + AND key_json IS NULL + ORDER BY last_seen + """ + + def check_too_many_devices_for_user_txn( + txn: LoggingTransaction, + ) -> List[str]: + txn.execute(sql, (user_id, max_last_seen)) + return [device_id for device_id, in txn] + + return await self.db_pool.runInteraction( + "check_too_many_devices_for_user", + check_too_many_devices_for_user_txn, + ) + class DeviceStore(DeviceWorkerStore, DeviceBackgroundUpdateStore): # Because we have write access, this will be a StreamIdGenerator @@ -1627,6 +1693,7 @@ class DeviceStore(DeviceWorkerStore, DeviceBackgroundUpdateStore): values={}, insertion_values={ "display_name": initial_device_display_name, + "last_seen": self._clock.time_msec(), "hidden": False, }, desc="store_device", @@ -1672,7 +1739,15 @@ class DeviceStore(DeviceWorkerStore, DeviceBackgroundUpdateStore): ) raise StoreError(500, "Problem storing device.") - async def delete_devices(self, user_id: str, device_ids: List[str]) -> None: + @cached(max_entries=0) + async def delete_device(self, user_id: str, device_id: str) -> None: + raise NotImplementedError() + + # Note: sometimes deleting rows out of `device_inbox` can take a long time, + # so we use a cache so that we deduplicate in flight requests to delete + # devices. + @cachedList(cached_method_name="delete_device", list_name="device_ids") + async def delete_devices(self, user_id: str, device_ids: Collection[str]) -> dict: """Deletes several devices. Args: @@ -1709,6 +1784,8 @@ class DeviceStore(DeviceWorkerStore, DeviceBackgroundUpdateStore): for device_id in device_ids: self.device_id_exists_cache.invalidate((user_id, device_id)) + return {} + async def update_device( self, user_id: str, device_id: str, new_display_name: Optional[str] = None ) -> None: diff --git a/synapse/storage/schema/main/delta/73/22_rebuild_user_dir_stats.sql b/synapse/storage/schema/main/delta/73/22_rebuild_user_dir_stats.sql new file mode 100644
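The new check_too_many_devices_for_user method encodes the pruning policy: users with at most ten non-hidden devices are never pruned; beyond that, devices without E2E keys that have not been seen for 14 days become candidates; and once a user has more than 50 devices the cutoff is pulled forward, using the last_seen value at OFFSET 50, so roughly the 50 most recent survive. A small sketch of that cutoff arithmetic (illustrative only, not the storage API):

import time

KEEP_WITHOUT_PRUNING = 10          # no pruning at or below this many devices
SOFT_CAP = 50                      # above this, the cutoff moves forward
STALE_AFTER_MS = 14 * 24 * 60 * 60 * 1000

def prune_cutoff(num_devices: int, last_seen_at_offset_50_ms: int, now_ms: int) -> int:
    # Devices with last_seen older than the returned value are prune candidates.
    max_last_seen = now_ms - STALE_AFTER_MS
    if num_devices > SOFT_CAP:
        max_last_seen = max(last_seen_at_offset_50_ms, max_last_seen)
    return max_last_seen

now = int(time.time() * 1000)
three_days_ago = now - 3 * 24 * 60 * 60 * 1000
print(prune_cutoff(60, three_days_ago, now))  # cutoff moves up to three days ago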
index 0000000000..afab1e4bb7 --- /dev/null +++ b/synapse/storage/schema/main/delta/73/22_rebuild_user_dir_stats.sql
@@ -0,0 +1,29 @@ +/* Copyright 2022 The Matrix.org Foundation C.I.C + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +INSERT INTO background_updates (ordering, update_name, progress_json, depends_on) VALUES + -- Set up user directory staging tables. + (7322, 'populate_user_directory_createtables', '{}', NULL), + -- Run through each room and update the user directory according to who is in it. + (7322, 'populate_user_directory_process_rooms', '{}', 'populate_user_directory_createtables'), + -- Insert all users into the user directory, if search_all_users is on. + (7322, 'populate_user_directory_process_users', '{}', 'populate_user_directory_process_rooms'), + -- Clean up user directory staging tables. + (7322, 'populate_user_directory_cleanup', '{}', 'populate_user_directory_process_users'), + -- Rebuild the room_stats_current and room_stats_state tables. + (7322, 'populate_stats_process_rooms', '{}', NULL), + -- Update the user_stats_current table. + (7322, 'populate_stats_process_users', '{}', NULL) +ON CONFLICT (update_name) DO NOTHING; diff --git a/synapse/util/caches/stream_change_cache.py b/synapse/util/caches/stream_change_cache.py
index c8b17acb59..1657459549 100644 --- a/synapse/util/caches/stream_change_cache.py +++ b/synapse/util/caches/stream_change_cache.py
@@ -213,16 +213,17 @@ class StreamChangeCache: """ assert isinstance(stream_pos, int) - if not self._cache: - # If the cache is empty, nothing can have changed. - return False - # _cache is not valid at or before the earliest known stream position, so # return that an entity has changed. if stream_pos <= self._earliest_known_stream_pos: self.metrics.inc_misses() return True + # If the cache is empty, nothing can have changed. + if not self._cache: + self.metrics.inc_misses() + return False + self.metrics.inc_hits() return stream_pos < self._cache.peekitem()[0] diff --git a/synapse/util/httpresourcetree.py b/synapse/util/httpresourcetree.py
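The reordered checks above change what an empty StreamChangeCache reports for positions at or before the earliest known stream position: it now answers True (we cannot know what changed that far back) and bumps the miss metric, while an empty cache still answers False for later positions. The updated tests/util/test_stream_change_cache.py below pins this down; as a standalone sketch:

from synapse.util.caches.stream_change_cache import StreamChangeCache

cache = StreamChangeCache("#example", 1)

# At or before the earliest known stream position, assume something changed.
assert cache.has_any_entity_changed(0)
assert cache.has_any_entity_changed(1)

# Strictly after it, an empty cache means nothing has changed.
assert not cache.has_any_entity_changed(2)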
index a0606851f7..39fab4fe06 100644 --- a/synapse/util/httpresourcetree.py +++ b/synapse/util/httpresourcetree.py
@@ -15,7 +15,9 @@ import logging from typing import Dict -from twisted.web.resource import NoResource, Resource +from twisted.web.resource import Resource + +from synapse.http.server import UnrecognizedRequestResource logger = logging.getLogger(__name__) @@ -49,7 +51,7 @@ def create_resource_tree( for path_seg in full_path.split(b"/")[1:-1]: if path_seg not in last_resource.listNames(): # resource doesn't exist, so make a "dummy resource" - child_resource: Resource = NoResource() + child_resource: Resource = UnrecognizedRequestResource() last_resource.putChild(path_seg, child_resource) res_id = _resource_id(last_resource, path_seg) resource_mappings[res_id] = child_resource diff --git a/tests/handlers/test_device.py b/tests/handlers/test_device.py
index ce7525e29c..a456bffd63 100644 --- a/tests/handlers/test_device.py +++ b/tests/handlers/test_device.py
@@ -115,7 +115,7 @@ class DeviceTestCase(unittest.HomeserverTestCase): "device_id": "xyz", "display_name": "display 0", "last_seen_ip": None, - "last_seen_ts": None, + "last_seen_ts": 1000000, }, device_map["xyz"], ) diff --git a/tests/rest/admin/test_user.py b/tests/rest/admin/test_user.py
index e8c9457794..5c1ced355f 100644 --- a/tests/rest/admin/test_user.py +++ b/tests/rest/admin/test_user.py
@@ -3994,7 +3994,7 @@ class ShadowBanRestTestCase(unittest.HomeserverTestCase): """ Tests that shadow-banning for a user that is not a local returns a 400 """ - url = "/_synapse/admin/v1/whois/@unknown_person:unknown_domain" + url = "/_synapse/admin/v1/users/@unknown_person:unknown_domain/shadow_ban" channel = self.make_request(method, url, access_token=self.admin_user_tok) self.assertEqual(400, channel.code, msg=channel.json_body) diff --git a/tests/rest/client/test_login_token_request.py b/tests/rest/client/test_login_token_request.py
index c2e1e08811..6aedc1a11c 100644 --- a/tests/rest/client/test_login_token_request.py +++ b/tests/rest/client/test_login_token_request.py
@@ -48,13 +48,13 @@ class LoginTokenRequestServletTestCase(unittest.HomeserverTestCase): def test_disabled(self) -> None: channel = self.make_request("POST", endpoint, {}, access_token=None) - self.assertEqual(channel.code, 400) + self.assertEqual(channel.code, 404) self.register_user(self.user, self.password) token = self.login(self.user, self.password) channel = self.make_request("POST", endpoint, {}, access_token=token) - self.assertEqual(channel.code, 400) + self.assertEqual(channel.code, 404) @override_config({"experimental_features": {"msc3882_enabled": True}}) def test_require_auth(self) -> None: diff --git a/tests/rest/client/test_receipts.py b/tests/rest/client/test_receipts.py new file mode 100644
index 0000000000..2a7fcea386 --- /dev/null +++ b/tests/rest/client/test_receipts.py
@@ -0,0 +1,76 @@ +# Copyright 2022 The Matrix.org Foundation C.I.C. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +from twisted.test.proto_helpers import MemoryReactor + +import synapse.rest.admin +from synapse.rest.client import login, receipts, register +from synapse.server import HomeServer +from synapse.util import Clock + +from tests import unittest + + +class ReceiptsTestCase(unittest.HomeserverTestCase): + servlets = [ + login.register_servlets, + register.register_servlets, + receipts.register_servlets, + synapse.rest.admin.register_servlets, + ] + + def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None: + self.owner = self.register_user("owner", "pass") + self.owner_tok = self.login("owner", "pass") + + def test_send_receipt(self) -> None: + channel = self.make_request( + "POST", + "/rooms/!abc:beep/receipt/m.read/$def", + content={}, + access_token=self.owner_tok, + ) + self.assertEqual(channel.code, 200, channel.result) + + def test_send_receipt_invalid_room_id(self) -> None: + channel = self.make_request( + "POST", + "/rooms/not-a-room-id/receipt/m.read/$def", + content={}, + access_token=self.owner_tok, + ) + self.assertEqual(channel.code, 400, channel.result) + self.assertEqual( + channel.json_body["error"], "A valid room ID and event ID must be specified" + ) + + def test_send_receipt_invalid_event_id(self) -> None: + channel = self.make_request( + "POST", + "/rooms/!abc:beep/receipt/m.read/not-an-event-id", + content={}, + access_token=self.owner_tok, + ) + self.assertEqual(channel.code, 400, channel.result) + self.assertEqual( + channel.json_body["error"], "A valid room ID and event ID must be specified" + ) + + def test_send_receipt_invalid_receipt_type(self) -> None: + channel = self.make_request( + "POST", + "/rooms/!abc:beep/receipt/invalid-receipt-type/$def", + content={}, + access_token=self.owner_tok, + ) + self.assertEqual(channel.code, 400, channel.result) diff --git a/tests/rest/client/test_rendezvous.py b/tests/rest/client/test_rendezvous.py
index ad00a476e1..c0eb5d01a6 100644 --- a/tests/rest/client/test_rendezvous.py +++ b/tests/rest/client/test_rendezvous.py
@@ -36,7 +36,7 @@ class RendezvousServletTestCase(unittest.HomeserverTestCase): def test_disabled(self) -> None: channel = self.make_request("POST", endpoint, {}, access_token=None) - self.assertEqual(channel.code, 400) + self.assertEqual(channel.code, 404) @override_config({"experimental_features": {"msc3886_endpoint": "/asd"}}) def test_redirect(self) -> None: diff --git a/tests/storage/test_client_ips.py b/tests/storage/test_client_ips.py
index 49ad3c1324..a9af1babed 100644 --- a/tests/storage/test_client_ips.py +++ b/tests/storage/test_client_ips.py
@@ -169,6 +169,8 @@ class ClientIpStoreTestCase(unittest.HomeserverTestCase): ) ) + last_seen = self.clock.time_msec() + if after_persisting: # Trigger the storage loop self.reactor.advance(10) @@ -189,7 +191,7 @@ class ClientIpStoreTestCase(unittest.HomeserverTestCase): "device_id": device_id, "ip": None, "user_agent": None, - "last_seen": None, + "last_seen": last_seen, }, ], ) diff --git a/tests/test_server.py b/tests/test_server.py
index 2d9a0257d4..d67d7722a4 100644 --- a/tests/test_server.py +++ b/tests/test_server.py
@@ -174,7 +174,7 @@ class JsonResourceTests(unittest.TestCase): self.reactor, FakeSite(res, self.reactor), b"GET", b"/_matrix/foobar" ) - self.assertEqual(channel.code, 400) + self.assertEqual(channel.code, 404) self.assertEqual(channel.json_body["error"], "Unrecognized request") self.assertEqual(channel.json_body["errcode"], "M_UNRECOGNIZED") diff --git a/tests/util/test_stream_change_cache.py b/tests/util/test_stream_change_cache.py
index 0305741c99..3df053493b 100644 --- a/tests/util/test_stream_change_cache.py +++ b/tests/util/test_stream_change_cache.py
@@ -144,9 +144,10 @@ class StreamChangeCacheTests(unittest.HomeserverTestCase): """ cache = StreamChangeCache("#test", 1) - # With no entities, it returns False for the past, present, and future. - self.assertFalse(cache.has_any_entity_changed(0)) - self.assertFalse(cache.has_any_entity_changed(1)) + # With no entities, it returns True for the past, present, and False for + # the future. + self.assertTrue(cache.has_any_entity_changed(0)) + self.assertTrue(cache.has_any_entity_changed(1)) self.assertFalse(cache.has_any_entity_changed(2)) # We add an entity