diff --git a/.ci/scripts/test_export_data_command.sh b/.ci/scripts/test_export_data_command.sh
index ab96387a0a..224cae9216 100755
--- a/.ci/scripts/test_export_data_command.sh
+++ b/.ci/scripts/test_export_data_command.sh
@@ -21,7 +21,7 @@ python -m synapse.app.homeserver --generate-keys -c .ci/sqlite-config.yaml
echo "--- Prepare test database"
# Make sure the SQLite3 database is using the latest schema and has no pending background update.
-scripts/update_synapse_database --database-config .ci/sqlite-config.yaml --run-background-updates
+update_synapse_database --database-config .ci/sqlite-config.yaml --run-background-updates
# Run the export-data command on the sqlite test database
python -m synapse.app.admin_cmd -c .ci/sqlite-config.yaml export-data @anon-20191002_181700-832:localhost:8800 \
@@ -41,7 +41,7 @@ fi
# Port the SQLite databse to postgres so we can check command works against postgres
echo "+++ Port SQLite3 databse to postgres"
-scripts/synapse_port_db --sqlite-database .ci/test_db.db --postgres-config .ci/postgres-config.yaml
+synapse_port_db --sqlite-database .ci/test_db.db --postgres-config .ci/postgres-config.yaml
# Run the export-data command on postgres database
python -m synapse.app.admin_cmd -c .ci/postgres-config.yaml export-data @anon-20191002_181700-832:localhost:8800 \
diff --git a/.ci/scripts/test_synapse_port_db.sh b/.ci/scripts/test_synapse_port_db.sh
index 797904e64c..91bd966f32 100755
--- a/.ci/scripts/test_synapse_port_db.sh
+++ b/.ci/scripts/test_synapse_port_db.sh
@@ -25,17 +25,19 @@ python -m synapse.app.homeserver --generate-keys -c .ci/sqlite-config.yaml
echo "--- Prepare test database"
# Make sure the SQLite3 database is using the latest schema and has no pending background update.
-scripts/update_synapse_database --database-config .ci/sqlite-config.yaml --run-background-updates
+update_synapse_database --database-config .ci/sqlite-config.yaml --run-background-updates
# Create the PostgreSQL database.
.ci/scripts/postgres_exec.py "CREATE DATABASE synapse"
echo "+++ Run synapse_port_db against test database"
-coverage run scripts/synapse_port_db --sqlite-database .ci/test_db.db --postgres-config .ci/postgres-config.yaml
+# TODO: this invocation of synapse_port_db (and others below) used to be prepended with `coverage run`,
+# but coverage seems unable to find the entrypoints installed by `pip install -e .`.
+synapse_port_db --sqlite-database .ci/test_db.db --postgres-config .ci/postgres-config.yaml
# We should be able to run twice against the same database.
echo "+++ Run synapse_port_db a second time"
-coverage run scripts/synapse_port_db --sqlite-database .ci/test_db.db --postgres-config .ci/postgres-config.yaml
+synapse_port_db --sqlite-database .ci/test_db.db --postgres-config .ci/postgres-config.yaml
#####
@@ -46,7 +48,7 @@ echo "--- Prepare empty SQLite database"
# we do this by deleting the sqlite db, and then doing the same again.
rm .ci/test_db.db
-scripts/update_synapse_database --database-config .ci/sqlite-config.yaml --run-background-updates
+update_synapse_database --database-config .ci/sqlite-config.yaml --run-background-updates
# re-create the PostgreSQL database.
.ci/scripts/postgres_exec.py \
@@ -54,4 +56,4 @@ scripts/update_synapse_database --database-config .ci/sqlite-config.yaml --run-b
"CREATE DATABASE synapse"
echo "+++ Run synapse_port_db against empty database"
-coverage run scripts/synapse_port_db --sqlite-database .ci/test_db.db --postgres-config .ci/postgres-config.yaml
+synapse_port_db --sqlite-database .ci/test_db.db --postgres-config .ci/postgres-config.yaml
diff --git a/.dockerignore b/.dockerignore
index f6c638b0a2..617f701597 100644
--- a/.dockerignore
+++ b/.dockerignore
@@ -3,7 +3,6 @@
# things to include
!docker
-!scripts
!synapse
!MANIFEST.in
!README.rst
diff --git a/MANIFEST.in b/MANIFEST.in
index 76d14eb642..7e903518e1 100644
--- a/MANIFEST.in
+++ b/MANIFEST.in
@@ -17,7 +17,6 @@ recursive-include synapse/storage *.txt
recursive-include synapse/storage *.md
recursive-include docs *
-recursive-include scripts *
recursive-include scripts-dev *
recursive-include synapse *.pyi
recursive-include tests *.py
diff --git a/changelog.d/12113.misc b/changelog.d/12113.misc
new file mode 100644
index 0000000000..102e064053
--- /dev/null
+++ b/changelog.d/12113.misc
@@ -0,0 +1 @@
+Refactor the tests for event relations.
diff --git a/changelog.d/12118.misc b/changelog.d/12118.misc
new file mode 100644
index 0000000000..a2c397d907
--- /dev/null
+++ b/changelog.d/12118.misc
@@ -0,0 +1 @@
+Move scripts to Synapse package and expose as setuptools entry points.
diff --git a/changelog.d/12128.misc b/changelog.d/12128.misc
new file mode 100644
index 0000000000..0570a8e327
--- /dev/null
+++ b/changelog.d/12128.misc
@@ -0,0 +1 @@
+Fix data validation to compare to lists, not sequences.
diff --git a/docker/Dockerfile b/docker/Dockerfile
index a8bb9b0e7f..327275a9ca 100644
--- a/docker/Dockerfile
+++ b/docker/Dockerfile
@@ -46,7 +46,6 @@ RUN \
&& rm -rf /var/lib/apt/lists/*
# Copy just what we need to pip install
-COPY scripts /synapse/scripts/
COPY MANIFEST.in README.rst setup.py synctl /synapse/
COPY synapse/__init__.py /synapse/synapse/__init__.py
COPY synapse/python_dependencies.py /synapse/synapse/python_dependencies.py
diff --git a/docs/development/database_schema.md b/docs/development/database_schema.md
index a767d3af9f..d996a7caa2 100644
--- a/docs/development/database_schema.md
+++ b/docs/development/database_schema.md
@@ -158,9 +158,9 @@ same as integers.
There are three separate aspects to this:
* Any new boolean column must be added to the `BOOLEAN_COLUMNS` list in
- `scripts/synapse_port_db`. This tells the port script to cast the integer
- value from SQLite to a boolean before writing the value to the postgres
- database.
+ `synapse/_scripts/synapse_port_db.py`. This tells the port script to cast
+ the integer value from SQLite to a boolean before writing the value to the
+ postgres database.
* Before SQLite 3.23, `TRUE` and `FALSE` were not recognised as constants by
SQLite, and the `IS [NOT] TRUE`/`IS [NOT] FALSE` operators were not
diff --git a/docs/usage/administration/admin_api/README.md b/docs/usage/administration/admin_api/README.md
index 2fca96f8be..3cbedc5dfa 100644
--- a/docs/usage/administration/admin_api/README.md
+++ b/docs/usage/administration/admin_api/README.md
@@ -12,7 +12,7 @@ UPDATE users SET admin = 1 WHERE name = '@foo:bar.com';
```
A new server admin user can also be created using the `register_new_matrix_user`
-command. This is a script that is located in the `scripts/` directory, or possibly
+command. This is a script that is distributed as part of synapse. It is possibly
already on your `$PATH` depending on how Synapse was installed.
Finding your user's `access_token` is client-dependent, but will usually be shown in the client's settings.
diff --git a/mypy.ini b/mypy.ini
index 38ff787609..6b1e995e64 100644
--- a/mypy.ini
+++ b/mypy.ini
@@ -23,6 +23,10 @@ files =
# https://docs.python.org/3/library/re.html#re.X
exclude = (?x)
^(
+ |synapse/_scripts/export_signing_key.py
+ |synapse/_scripts/move_remote_media_to_new_store.py
+ |synapse/_scripts/synapse_port_db.py
+ |synapse/_scripts/update_synapse_database.py
|synapse/storage/databases/__init__.py
|synapse/storage/databases/main/__init__.py
|synapse/storage/databases/main/cache.py
diff --git a/scripts-dev/generate_sample_config b/scripts-dev/generate_sample_config
index 4cd1d1d5b8..185e277933 100755
--- a/scripts-dev/generate_sample_config
+++ b/scripts-dev/generate_sample_config
@@ -10,19 +10,19 @@ SAMPLE_CONFIG="docs/sample_config.yaml"
SAMPLE_LOG_CONFIG="docs/sample_log_config.yaml"
check() {
- diff -u "$SAMPLE_LOG_CONFIG" <(./scripts/generate_log_config) >/dev/null || return 1
+ diff -u "$SAMPLE_LOG_CONFIG" <(synapse/_scripts/generate_log_config.py) >/dev/null || return 1
}
if [ "$1" == "--check" ]; then
- diff -u "$SAMPLE_CONFIG" <(./scripts/generate_config --header-file docs/.sample_config_header.yaml) >/dev/null || {
+ diff -u "$SAMPLE_CONFIG" <(synapse/_scripts/generate_config.py --header-file docs/.sample_config_header.yaml) >/dev/null || {
echo -e "\e[1m\e[31m$SAMPLE_CONFIG is not up-to-date. Regenerate it with \`scripts-dev/generate_sample_config\`.\e[0m" >&2
exit 1
}
- diff -u "$SAMPLE_LOG_CONFIG" <(./scripts/generate_log_config) >/dev/null || {
+ diff -u "$SAMPLE_LOG_CONFIG" <(synapse/_scripts/generate_log_config.py) >/dev/null || {
echo -e "\e[1m\e[31m$SAMPLE_LOG_CONFIG is not up-to-date. Regenerate it with \`scripts-dev/generate_sample_config\`.\e[0m" >&2
exit 1
}
else
- ./scripts/generate_config --header-file docs/.sample_config_header.yaml -o "$SAMPLE_CONFIG"
- ./scripts/generate_log_config -o "$SAMPLE_LOG_CONFIG"
+ synapse/_scripts/generate_config.py --header-file docs/.sample_config_header.yaml -o "$SAMPLE_CONFIG"
+ synapse/_scripts/generate_log_config.py -o "$SAMPLE_LOG_CONFIG"
fi
diff --git a/scripts-dev/lint.sh b/scripts-dev/lint.sh
index b6554a73c1..df4d4934d0 100755
--- a/scripts-dev/lint.sh
+++ b/scripts-dev/lint.sh
@@ -84,13 +84,6 @@ else
files=(
"synapse" "docker" "tests"
# annoyingly, black doesn't find these so we have to list them
- "scripts/export_signing_key"
- "scripts/generate_config"
- "scripts/generate_log_config"
- "scripts/hash_password"
- "scripts/register_new_matrix_user"
- "scripts/synapse_port_db"
- "scripts/update_synapse_database"
"scripts-dev"
"scripts-dev/build_debian_packages"
"scripts-dev/sign_json"
diff --git a/scripts-dev/make_full_schema.sh b/scripts-dev/make_full_schema.sh
index c3c90f4ec6..f0e22d4ca2 100755
--- a/scripts-dev/make_full_schema.sh
+++ b/scripts-dev/make_full_schema.sh
@@ -147,7 +147,7 @@ python -m synapse.app.homeserver --generate-keys -c "$SQLITE_CONFIG"
# Make sure the SQLite3 database is using the latest schema and has no pending background update.
echo "Running db background jobs..."
-scripts/update_synapse_database --database-config --run-background-updates "$SQLITE_CONFIG"
+synapse/_scripts/update_synapse_database.py --database-config "$SQLITE_CONFIG" --run-background-updates
# Create the PostgreSQL database.
echo "Creating postgres database..."
@@ -156,10 +156,10 @@ createdb --lc-collate=C --lc-ctype=C --template=template0 "$POSTGRES_DB_NAME"
echo "Copying data from SQLite3 to Postgres with synapse_port_db..."
if [ -z "$COVERAGE" ]; then
# No coverage needed
- scripts/synapse_port_db --sqlite-database "$SQLITE_DB" --postgres-config "$POSTGRES_CONFIG"
+ synapse/_scripts/synapse_port_db.py --sqlite-database "$SQLITE_DB" --postgres-config "$POSTGRES_CONFIG"
else
# Coverage desired
- coverage run scripts/synapse_port_db --sqlite-database "$SQLITE_DB" --postgres-config "$POSTGRES_CONFIG"
+ coverage run synapse/_scripts/synapse_port_db.py --sqlite-database "$SQLITE_DB" --postgres-config "$POSTGRES_CONFIG"
fi
# Delete schema_version, applied_schema_deltas and applied_module_schemas tables
diff --git a/scripts/register_new_matrix_user b/scripts/register_new_matrix_user
deleted file mode 100755
index 00104b9d62..0000000000
--- a/scripts/register_new_matrix_user
+++ /dev/null
@@ -1,19 +0,0 @@
-#!/usr/bin/env python
-# Copyright 2015, 2016 OpenMarket Ltd
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from synapse._scripts.register_new_matrix_user import main
-
-if __name__ == "__main__":
- main()
diff --git a/scripts/synapse_review_recent_signups b/scripts/synapse_review_recent_signups
deleted file mode 100755
index a36d46e14c..0000000000
--- a/scripts/synapse_review_recent_signups
+++ /dev/null
@@ -1,19 +0,0 @@
-#!/usr/bin/env python
-# Copyright 2021 The Matrix.org Foundation C.I.C.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from synapse._scripts.review_recent_signups import main
-
-if __name__ == "__main__":
- main()
diff --git a/scripts/sync_room_to_group.pl b/scripts/sync_room_to_group.pl
deleted file mode 100755
index f0c2dfadfa..0000000000
--- a/scripts/sync_room_to_group.pl
+++ /dev/null
@@ -1,45 +0,0 @@
-#!/usr/bin/env perl
-
-use strict;
-use warnings;
-
-use JSON::XS;
-use LWP::UserAgent;
-use URI::Escape;
-
-if (@ARGV < 4) {
- die "usage: $0 <homeserver url> <access_token> <room_id|room_alias> <group_id>\n";
-}
-
-my ($hs, $access_token, $room_id, $group_id) = @ARGV;
-my $ua = LWP::UserAgent->new();
-$ua->timeout(10);
-
-if ($room_id =~ /^#/) {
- $room_id = uri_escape($room_id);
- $room_id = decode_json($ua->get("${hs}/_matrix/client/r0/directory/room/${room_id}?access_token=${access_token}")->decoded_content)->{room_id};
-}
-
-my $room_users = [ keys %{decode_json($ua->get("${hs}/_matrix/client/r0/rooms/${room_id}/joined_members?access_token=${access_token}")->decoded_content)->{joined}} ];
-my $group_users = [
- (map { $_->{user_id} } @{decode_json($ua->get("${hs}/_matrix/client/unstable/groups/${group_id}/users?access_token=${access_token}" )->decoded_content)->{chunk}}),
- (map { $_->{user_id} } @{decode_json($ua->get("${hs}/_matrix/client/unstable/groups/${group_id}/invited_users?access_token=${access_token}" )->decoded_content)->{chunk}}),
-];
-
-die "refusing to sync from empty room" unless (@$room_users);
-die "refusing to sync to empty group" unless (@$group_users);
-
-my $diff = {};
-foreach my $user (@$room_users) { $diff->{$user}++ }
-foreach my $user (@$group_users) { $diff->{$user}-- }
-
-foreach my $user (keys %$diff) {
- if ($diff->{$user} == 1) {
- warn "inviting $user";
- print STDERR $ua->put("${hs}/_matrix/client/unstable/groups/${group_id}/admin/users/invite/${user}?access_token=${access_token}", Content=>'{}')->status_line."\n";
- }
- elsif ($diff->{$user} == -1) {
- warn "removing $user";
- print STDERR $ua->put("${hs}/_matrix/client/unstable/groups/${group_id}/admin/users/remove/${user}?access_token=${access_token}", Content=>'{}')->status_line."\n";
- }
-}
diff --git a/setup.py b/setup.py
index 26f4650348..318df16766 100755
--- a/setup.py
+++ b/setup.py
@@ -15,7 +15,6 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
-import glob
import os
from typing import Any, Dict
@@ -153,8 +152,19 @@ setup(
python_requires="~=3.7",
entry_points={
"console_scripts": [
+ # Application
"synapse_homeserver = synapse.app.homeserver:main",
"synapse_worker = synapse.app.generic_worker:main",
+ # Scripts
+ "export_signing_key = synapse._scripts.export_signing_key:main",
+ "generate_config = synapse._scripts.generate_config:main",
+ "generate_log_config = synapse._scripts.generate_log_config:main",
+ "generate_signing_key = synapse._scripts.generate_signing_key:main",
+ "hash_password = synapse._scripts.hash_password:main",
+ "register_new_matrix_user = synapse._scripts.register_new_matrix_user:main",
+ "synapse_port_db = synapse._scripts.synapse_port_db:main",
+ "synapse_review_recent_signups = synapse._scripts.review_recent_signups:main",
+ "update_synapse_database = synapse._scripts.update_synapse_database:main",
]
},
classifiers=[
@@ -167,6 +177,6 @@ setup(
"Programming Language :: Python :: 3.9",
"Programming Language :: Python :: 3.10",
],
- scripts=["synctl"] + glob.glob("scripts/*"),
+ scripts=["synctl"],
cmdclass={"test": TestCommand},
)
diff --git a/snap/snapcraft.yaml b/snap/snapcraft.yaml
index 9a01152c15..dd4c8478d5 100644
--- a/snap/snapcraft.yaml
+++ b/snap/snapcraft.yaml
@@ -20,7 +20,7 @@ apps:
generate-config:
command: generate_config
generate-signing-key:
- command: generate_signing_key.py
+ command: generate_signing_key
register-new-matrix-user:
command: register_new_matrix_user
plugs: [network]
diff --git a/scripts/export_signing_key b/synapse/_scripts/export_signing_key.py
index bf0139bd64..3d254348f1 100755
--- a/scripts/export_signing_key
+++ b/synapse/_scripts/export_signing_key.py
@@ -50,7 +50,7 @@ def format_for_config(public_key: nacl.signing.VerifyKey, expiry_ts: int):
)
-if __name__ == "__main__":
+def main():
parser = argparse.ArgumentParser()
parser.add_argument(
@@ -85,7 +85,6 @@ if __name__ == "__main__":
else format_plain
)
- keys = []
for file in args.key_file:
try:
res = read_signing_keys(file)
@@ -98,3 +97,7 @@ if __name__ == "__main__":
res = []
for key in res:
formatter(get_verify_key(key))
+
+
+if __name__ == "__main__":
+ main()
diff --git a/scripts/generate_config b/synapse/_scripts/generate_config.py
index 931b40c045..75fce20b12 100755
--- a/scripts/generate_config
+++ b/synapse/_scripts/generate_config.py
@@ -6,7 +6,8 @@ import sys
from synapse.config.homeserver import HomeServerConfig
-if __name__ == "__main__":
+
+def main():
parser = argparse.ArgumentParser()
parser.add_argument(
"--config-dir",
@@ -76,3 +77,7 @@ if __name__ == "__main__":
shutil.copyfileobj(args.header_file, args.output_file)
args.output_file.write(conf)
+
+
+if __name__ == "__main__":
+ main()
diff --git a/scripts/generate_log_config b/synapse/_scripts/generate_log_config.py
index e72a0dafb7..82fc763140 100755
--- a/scripts/generate_log_config
+++ b/synapse/_scripts/generate_log_config.py
@@ -19,7 +19,8 @@ import sys
from synapse.config.logger import DEFAULT_LOG_CONFIG
-if __name__ == "__main__":
+
+def main():
parser = argparse.ArgumentParser()
parser.add_argument(
@@ -42,3 +43,7 @@ if __name__ == "__main__":
out = args.output_file
out.write(DEFAULT_LOG_CONFIG.substitute(log_file=args.log_file))
out.flush()
+
+
+if __name__ == "__main__":
+ main()
diff --git a/scripts/generate_signing_key.py b/synapse/_scripts/generate_signing_key.py
index 07df25a809..bc26d25bfd 100755
--- a/scripts/generate_signing_key.py
+++ b/synapse/_scripts/generate_signing_key.py
@@ -19,7 +19,8 @@ from signedjson.key import generate_signing_key, write_signing_keys
from synapse.util.stringutils import random_string
-if __name__ == "__main__":
+
+def main():
parser = argparse.ArgumentParser()
parser.add_argument(
@@ -34,3 +35,7 @@ if __name__ == "__main__":
key_id = "a_" + random_string(4)
key = (generate_signing_key(key_id),)
write_signing_keys(args.output_file, key)
+
+
+if __name__ == "__main__":
+ main()
diff --git a/scripts/hash_password b/synapse/_scripts/hash_password.py
index 1d6fb0d700..708640c7de 100755
--- a/scripts/hash_password
+++ b/synapse/_scripts/hash_password.py
@@ -8,9 +8,6 @@ import unicodedata
import bcrypt
import yaml
-bcrypt_rounds = 12
-password_pepper = ""
-
def prompt_for_pass():
password = getpass.getpass("Password: ")
@@ -26,7 +23,10 @@ def prompt_for_pass():
return password
-if __name__ == "__main__":
+def main():
+ bcrypt_rounds = 12
+ password_pepper = ""
+
parser = argparse.ArgumentParser(
description=(
"Calculate the hash of a new password, so that passwords can be reset"
@@ -77,3 +77,7 @@ if __name__ == "__main__":
).decode("ascii")
print(hashed)
+
+
+if __name__ == "__main__":
+ main()
diff --git a/scripts/move_remote_media_to_new_store.py b/synapse/_scripts/move_remote_media_to_new_store.py
index 875aa4781f..9667d95dfe 100755
--- a/scripts/move_remote_media_to_new_store.py
+++ b/synapse/_scripts/move_remote_media_to_new_store.py
@@ -28,7 +28,7 @@ This can be extracted from postgres with::
To use, pipe the above into::
- PYTHON_PATH=. ./scripts/move_remote_media_to_new_store.py <source repo> <dest repo>
+    PYTHONPATH=. synapse/_scripts/move_remote_media_to_new_store.py <source repo> <dest repo>
"""
import argparse
diff --git a/scripts/synapse_port_db b/synapse/_scripts/synapse_port_db.py
index db354b3c8c..c38666da18 100755
--- a/scripts/synapse_port_db
+++ b/synapse/_scripts/synapse_port_db.py
@@ -1146,7 +1146,7 @@ class TerminalProgress(Progress):
##############################################
-if __name__ == "__main__":
+def main():
parser = argparse.ArgumentParser(
description="A script to port an existing synapse SQLite database to"
" a new PostgreSQL database."
@@ -1251,3 +1251,7 @@ if __name__ == "__main__":
sys.stderr.write(end_error)
sys.exit(5)
+
+
+if __name__ == "__main__":
+ main()
diff --git a/scripts/update_synapse_database b/synapse/_scripts/update_synapse_database.py
index f43676afaa..f43676afaa 100755
--- a/scripts/update_synapse_database
+++ b/synapse/_scripts/update_synapse_database.py
diff --git a/synapse/config/_base.py b/synapse/config/_base.py
index 1265738dc1..8e19e2fc26 100644
--- a/synapse/config/_base.py
+++ b/synapse/config/_base.py
@@ -383,7 +383,7 @@ class RootConfig:
Build a default configuration file
This is used when the user explicitly asks us to generate a config file
- (eg with --generate_config).
+ (eg with --generate-config).
Args:
config_dir_path: The path where the config files are kept. Used to
diff --git a/synapse/federation/federation_client.py b/synapse/federation/federation_client.py
index 64e595e748..467275b98c 100644
--- a/synapse/federation/federation_client.py
+++ b/synapse/federation/federation_client.py
@@ -1428,7 +1428,7 @@ class FederationClient(FederationBase):
# Validate children_state of the room.
children_state = room.pop("children_state", [])
- if not isinstance(children_state, Sequence):
+ if not isinstance(children_state, list):
raise InvalidResponseError("'room.children_state' must be a list")
if any(not isinstance(e, dict) for e in children_state):
raise InvalidResponseError("Invalid event in 'children_state' list")
@@ -1440,14 +1440,14 @@ class FederationClient(FederationBase):
# Validate the children rooms.
children = res.get("children", [])
- if not isinstance(children, Sequence):
+ if not isinstance(children, list):
raise InvalidResponseError("'children' must be a list")
if any(not isinstance(r, dict) for r in children):
raise InvalidResponseError("Invalid room in 'children' list")
# Validate the inaccessible children.
inaccessible_children = res.get("inaccessible_children", [])
- if not isinstance(inaccessible_children, Sequence):
+ if not isinstance(inaccessible_children, list):
raise InvalidResponseError("'inaccessible_children' must be a list")
if any(not isinstance(r, str) for r in inaccessible_children):
raise InvalidResponseError(
@@ -1630,7 +1630,7 @@ def _validate_hierarchy_event(d: JsonDict) -> None:
raise ValueError("Invalid event: 'content' must be a dict")
via = content.get("via")
- if not isinstance(via, Sequence):
+ if not isinstance(via, list):
raise ValueError("Invalid event: 'via' must be a list")
if any(not isinstance(v, str) for v in via):
raise ValueError("Invalid event: 'via' must be a list of strings")
diff --git a/synapse/handlers/room_summary.py b/synapse/handlers/room_summary.py
index 55c2cbdba8..3979cbba71 100644
--- a/synapse/handlers/room_summary.py
+++ b/synapse/handlers/room_summary.py
@@ -857,7 +857,7 @@ class _RoomEntry:
def _has_valid_via(e: EventBase) -> bool:
via = e.content.get("via")
- if not via or not isinstance(via, Sequence):
+ if not via or not isinstance(via, list):
return False
for v in via:
if not isinstance(v, str):
diff --git a/tests/rest/client/test_relations.py b/tests/rest/client/test_relations.py
index c8db45719e..a087cd7b21 100644
--- a/tests/rest/client/test_relations.py
+++ b/tests/rest/client/test_relations.py
@@ -34,7 +34,7 @@ from tests.test_utils import make_awaitable
from tests.test_utils.event_injection import inject_event
-class RelationsTestCase(unittest.HomeserverTestCase):
+class BaseRelationsTestCase(unittest.HomeserverTestCase):
servlets = [
relations.register_servlets,
room.register_servlets,
@@ -48,7 +48,6 @@ class RelationsTestCase(unittest.HomeserverTestCase):
def default_config(self) -> dict:
# We need to enable msc1849 support for aggregations
config = super().default_config()
- config["experimental_msc1849_support_enabled"] = True
# We enable frozen dicts as relations/edits change event contents, so we
# want to test that we don't modify the events in the caches.
@@ -67,10 +66,62 @@ class RelationsTestCase(unittest.HomeserverTestCase):
res = self.helper.send(self.room, body="Hi!", tok=self.user_token)
self.parent_id = res["event_id"]
- def test_send_relation(self) -> None:
- """Tests that sending a relation using the new /send_relation works
- creates the right shape of event.
+ def _create_user(self, localpart: str) -> Tuple[str, str]:
+ user_id = self.register_user(localpart, "abc123")
+ access_token = self.login(localpart, "abc123")
+
+ return user_id, access_token
+
+ def _send_relation(
+ self,
+ relation_type: str,
+ event_type: str,
+ key: Optional[str] = None,
+ content: Optional[dict] = None,
+ access_token: Optional[str] = None,
+ parent_id: Optional[str] = None,
+ ) -> FakeChannel:
+ """Helper function to send a relation pointing at `self.parent_id`
+
+ Args:
+ relation_type: One of `RelationTypes`
+ event_type: The type of the event to create
+ key: The aggregation key used for m.annotation relation type.
+ content: The content of the created event. Will be modified to configure
+ the m.relates_to key based on the other provided parameters.
+ access_token: The access token used to send the relation, defaults
+ to `self.user_token`
+ parent_id: The event_id this relation relates to. If None, then self.parent_id
+
+ Returns:
+ FakeChannel
"""
+ if not access_token:
+ access_token = self.user_token
+
+ original_id = parent_id if parent_id else self.parent_id
+
+ if content is None:
+ content = {}
+ content["m.relates_to"] = {
+ "event_id": original_id,
+ "rel_type": relation_type,
+ }
+ if key is not None:
+ content["m.relates_to"]["key"] = key
+
+ channel = self.make_request(
+ "POST",
+ f"/_matrix/client/v3/rooms/{self.room}/send/{event_type}",
+ content,
+ access_token=access_token,
+ )
+ return channel
+
+
+class RelationsTestCase(BaseRelationsTestCase):
+ def test_send_relation(self) -> None:
+ """Tests that sending a relation works."""
channel = self._send_relation(RelationTypes.ANNOTATION, "m.reaction", key="👍")
self.assertEqual(200, channel.code, channel.json_body)
@@ -79,7 +130,7 @@ class RelationsTestCase(unittest.HomeserverTestCase):
channel = self.make_request(
"GET",
- "/rooms/%s/event/%s" % (self.room, event_id),
+ f"/rooms/{self.room}/event/{event_id}",
access_token=self.user_token,
)
self.assertEqual(200, channel.code, channel.json_body)
@@ -317,9 +368,7 @@ class RelationsTestCase(unittest.HomeserverTestCase):
# Request /sync, limiting it such that only the latest event is returned
# (and not the relation).
- filter = urllib.parse.quote_plus(
- '{"room": {"timeline": {"limit": 1}}}'.encode()
- )
+ filter = urllib.parse.quote_plus(b'{"room": {"timeline": {"limit": 1}}}')
channel = self.make_request(
"GET", f"/sync?filter={filter}", access_token=self.user_token
)
@@ -404,8 +453,7 @@ class RelationsTestCase(unittest.HomeserverTestCase):
channel = self.make_request(
"GET",
- "/_matrix/client/unstable/rooms/%s/aggregations/%s?limit=1%s"
- % (self.room, self.parent_id, from_token),
+ f"/_matrix/client/unstable/rooms/{self.room}/aggregations/{self.parent_id}?limit=1{from_token}",
access_token=self.user_token,
)
self.assertEqual(200, channel.code, channel.json_body)
@@ -544,8 +592,7 @@ class RelationsTestCase(unittest.HomeserverTestCase):
channel = self.make_request(
"GET",
- "/_matrix/client/unstable/rooms/%s/aggregations/%s"
- % (self.room, self.parent_id),
+ f"/_matrix/client/unstable/rooms/{self.room}/aggregations/{self.parent_id}",
access_token=self.user_token,
)
self.assertEqual(200, channel.code, channel.json_body)
@@ -560,47 +607,13 @@ class RelationsTestCase(unittest.HomeserverTestCase):
},
)
- def test_aggregation_redactions(self) -> None:
- """Test that annotations get correctly aggregated after a redaction."""
-
- channel = self._send_relation(RelationTypes.ANNOTATION, "m.reaction", "a")
- self.assertEqual(200, channel.code, channel.json_body)
- to_redact_event_id = channel.json_body["event_id"]
-
- channel = self._send_relation(
- RelationTypes.ANNOTATION, "m.reaction", "a", access_token=self.user2_token
- )
- self.assertEqual(200, channel.code, channel.json_body)
-
- # Now lets redact one of the 'a' reactions
- channel = self.make_request(
- "POST",
- "/_matrix/client/r0/rooms/%s/redact/%s" % (self.room, to_redact_event_id),
- access_token=self.user_token,
- content={},
- )
- self.assertEqual(200, channel.code, channel.json_body)
-
- channel = self.make_request(
- "GET",
- "/_matrix/client/unstable/rooms/%s/aggregations/%s"
- % (self.room, self.parent_id),
- access_token=self.user_token,
- )
- self.assertEqual(200, channel.code, channel.json_body)
-
- self.assertEqual(
- channel.json_body,
- {"chunk": [{"type": "m.reaction", "key": "a", "count": 1}]},
- )
-
def test_aggregation_must_be_annotation(self) -> None:
"""Test that aggregations must be annotations."""
channel = self.make_request(
"GET",
- "/_matrix/client/unstable/rooms/%s/aggregations/%s/%s?limit=1"
- % (self.room, self.parent_id, RelationTypes.REPLACE),
+ f"/_matrix/client/unstable/rooms/{self.room}/aggregations"
+ f"/{self.parent_id}/{RelationTypes.REPLACE}?limit=1",
access_token=self.user_token,
)
self.assertEqual(400, channel.code, channel.json_body)
@@ -986,9 +999,7 @@ class RelationsTestCase(unittest.HomeserverTestCase):
# Request sync, but limit the timeline so it becomes limited (and includes
# bundled aggregations).
- filter = urllib.parse.quote_plus(
- '{"room": {"timeline": {"limit": 2}}}'.encode()
- )
+ filter = urllib.parse.quote_plus(b'{"room": {"timeline": {"limit": 2}}}')
channel = self.make_request(
"GET", f"/sync?filter={filter}", access_token=self.user_token
)
@@ -1053,7 +1064,7 @@ class RelationsTestCase(unittest.HomeserverTestCase):
channel = self.make_request(
"GET",
- "/rooms/%s/event/%s" % (self.room, self.parent_id),
+ f"/rooms/{self.room}/event/{self.parent_id}",
access_token=self.user_token,
)
self.assertEqual(200, channel.code, channel.json_body)
@@ -1096,7 +1107,7 @@ class RelationsTestCase(unittest.HomeserverTestCase):
channel = self.make_request(
"GET",
- "/rooms/%s/event/%s" % (self.room, reply),
+ f"/rooms/{self.room}/event/{reply}",
access_token=self.user_token,
)
self.assertEqual(200, channel.code, channel.json_body)
@@ -1198,7 +1209,7 @@ class RelationsTestCase(unittest.HomeserverTestCase):
# Request the original event.
channel = self.make_request(
"GET",
- "/rooms/%s/event/%s" % (self.room, self.parent_id),
+ f"/rooms/{self.room}/event/{self.parent_id}",
access_token=self.user_token,
)
self.assertEqual(200, channel.code, channel.json_body)
@@ -1217,102 +1228,6 @@ class RelationsTestCase(unittest.HomeserverTestCase):
{"event_id": edit_event_id, "sender": self.user_id}, m_replace_dict
)
- def test_relations_redaction_redacts_edits(self) -> None:
- """Test that edits of an event are redacted when the original event
- is redacted.
- """
- # Send a new event
- res = self.helper.send(self.room, body="Heyo!", tok=self.user_token)
- original_event_id = res["event_id"]
-
- # Add a relation
- channel = self._send_relation(
- RelationTypes.REPLACE,
- "m.room.message",
- parent_id=original_event_id,
- content={
- "msgtype": "m.text",
- "body": "Wibble",
- "m.new_content": {"msgtype": "m.text", "body": "First edit"},
- },
- )
- self.assertEqual(200, channel.code, channel.json_body)
-
- # Check the relation is returned
- channel = self.make_request(
- "GET",
- "/_matrix/client/unstable/rooms/%s/relations/%s/m.replace/m.room.message"
- % (self.room, original_event_id),
- access_token=self.user_token,
- )
- self.assertEqual(200, channel.code, channel.json_body)
-
- self.assertIn("chunk", channel.json_body)
- self.assertEqual(len(channel.json_body["chunk"]), 1)
-
- # Redact the original event
- channel = self.make_request(
- "PUT",
- "/rooms/%s/redact/%s/%s"
- % (self.room, original_event_id, "test_relations_redaction_redacts_edits"),
- access_token=self.user_token,
- content="{}",
- )
- self.assertEqual(200, channel.code, channel.json_body)
-
- # Try to check for remaining m.replace relations
- channel = self.make_request(
- "GET",
- "/_matrix/client/unstable/rooms/%s/relations/%s/m.replace/m.room.message"
- % (self.room, original_event_id),
- access_token=self.user_token,
- )
- self.assertEqual(200, channel.code, channel.json_body)
-
- # Check that no relations are returned
- self.assertIn("chunk", channel.json_body)
- self.assertEqual(channel.json_body["chunk"], [])
-
- def test_aggregations_redaction_prevents_access_to_aggregations(self) -> None:
- """Test that annotations of an event are redacted when the original event
- is redacted.
- """
- # Send a new event
- res = self.helper.send(self.room, body="Hello!", tok=self.user_token)
- original_event_id = res["event_id"]
-
- # Add a relation
- channel = self._send_relation(
- RelationTypes.ANNOTATION, "m.reaction", key="👍", parent_id=original_event_id
- )
- self.assertEqual(200, channel.code, channel.json_body)
-
- # Redact the original
- channel = self.make_request(
- "PUT",
- "/rooms/%s/redact/%s/%s"
- % (
- self.room,
- original_event_id,
- "test_aggregations_redaction_prevents_access_to_aggregations",
- ),
- access_token=self.user_token,
- content="{}",
- )
- self.assertEqual(200, channel.code, channel.json_body)
-
- # Check that aggregations returns zero
- channel = self.make_request(
- "GET",
- "/_matrix/client/unstable/rooms/%s/aggregations/%s/m.annotation/m.reaction"
- % (self.room, original_event_id),
- access_token=self.user_token,
- )
- self.assertEqual(200, channel.code, channel.json_body)
-
- self.assertIn("chunk", channel.json_body)
- self.assertEqual(channel.json_body["chunk"], [])
-
def test_unknown_relations(self) -> None:
"""Unknown relations should be accepted."""
channel = self._send_relation("m.relation.test", "m.room.test")
@@ -1321,8 +1236,7 @@ class RelationsTestCase(unittest.HomeserverTestCase):
channel = self.make_request(
"GET",
- "/_matrix/client/unstable/rooms/%s/relations/%s?limit=1"
- % (self.room, self.parent_id),
+ f"/_matrix/client/unstable/rooms/{self.room}/relations/{self.parent_id}?limit=1",
access_token=self.user_token,
)
self.assertEqual(200, channel.code, channel.json_body)
@@ -1343,7 +1257,7 @@ class RelationsTestCase(unittest.HomeserverTestCase):
# When bundling the unknown relation is not included.
channel = self.make_request(
"GET",
- "/rooms/%s/event/%s" % (self.room, self.parent_id),
+ f"/rooms/{self.room}/event/{self.parent_id}",
access_token=self.user_token,
)
self.assertEqual(200, channel.code, channel.json_body)
@@ -1352,8 +1266,7 @@ class RelationsTestCase(unittest.HomeserverTestCase):
# But unknown relations can be directly queried.
channel = self.make_request(
"GET",
- "/_matrix/client/unstable/rooms/%s/aggregations/%s?limit=1"
- % (self.room, self.parent_id),
+ f"/_matrix/client/unstable/rooms/{self.room}/aggregations/{self.parent_id}?limit=1",
access_token=self.user_token,
)
self.assertEqual(200, channel.code, channel.json_body)
@@ -1369,58 +1282,6 @@ class RelationsTestCase(unittest.HomeserverTestCase):
raise AssertionError(f"Event {self.parent_id} not found in chunk")
- def _send_relation(
- self,
- relation_type: str,
- event_type: str,
- key: Optional[str] = None,
- content: Optional[dict] = None,
- access_token: Optional[str] = None,
- parent_id: Optional[str] = None,
- ) -> FakeChannel:
- """Helper function to send a relation pointing at `self.parent_id`
-
- Args:
- relation_type: One of `RelationTypes`
- event_type: The type of the event to create
- key: The aggregation key used for m.annotation relation type.
- content: The content of the created event. Will be modified to configure
- the m.relates_to key based on the other provided parameters.
- access_token: The access token used to send the relation, defaults
- to `self.user_token`
- parent_id: The event_id this relation relates to. If None, then self.parent_id
-
- Returns:
- FakeChannel
- """
- if not access_token:
- access_token = self.user_token
-
- original_id = parent_id if parent_id else self.parent_id
-
- if content is None:
- content = {}
- content["m.relates_to"] = {
- "event_id": original_id,
- "rel_type": relation_type,
- }
- if key is not None:
- content["m.relates_to"]["key"] = key
-
- channel = self.make_request(
- "POST",
- f"/_matrix/client/v3/rooms/{self.room}/send/{event_type}",
- content,
- access_token=access_token,
- )
- return channel
-
- def _create_user(self, localpart: str) -> Tuple[str, str]:
- user_id = self.register_user(localpart, "abc123")
- access_token = self.login(localpart, "abc123")
-
- return user_id, access_token
-
def test_background_update(self) -> None:
"""Test the event_arbitrary_relations background update."""
channel = self._send_relation(RelationTypes.ANNOTATION, "m.reaction", key="👍")
@@ -1482,3 +1343,112 @@ class RelationsTestCase(unittest.HomeserverTestCase):
[ev["event_id"] for ev in channel.json_body["chunk"]],
[annotation_event_id_good, thread_event_id],
)
+
+
+class RelationRedactionTestCase(BaseRelationsTestCase):
+ """Test the behaviour of relations when the parent or child event is redacted."""
+
+ def _redact(self, event_id: str) -> None:
+ channel = self.make_request(
+ "POST",
+ f"/_matrix/client/r0/rooms/{self.room}/redact/{event_id}",
+ access_token=self.user_token,
+ content={},
+ )
+ self.assertEqual(200, channel.code, channel.json_body)
+
+ def test_redact_relation_annotation(self) -> None:
+ """Test that annotations of an event are properly handled after the
+ annotation is redacted.
+ """
+ channel = self._send_relation(RelationTypes.ANNOTATION, "m.reaction", "a")
+ self.assertEqual(200, channel.code, channel.json_body)
+ to_redact_event_id = channel.json_body["event_id"]
+
+ channel = self._send_relation(
+ RelationTypes.ANNOTATION, "m.reaction", "a", access_token=self.user2_token
+ )
+ self.assertEqual(200, channel.code, channel.json_body)
+
+ # Redact one of the reactions.
+ self._redact(to_redact_event_id)
+
+ # Ensure that the aggregations are correct.
+ channel = self.make_request(
+ "GET",
+ f"/_matrix/client/unstable/rooms/{self.room}/aggregations/{self.parent_id}",
+ access_token=self.user_token,
+ )
+ self.assertEqual(200, channel.code, channel.json_body)
+
+ self.assertEqual(
+ channel.json_body,
+ {"chunk": [{"type": "m.reaction", "key": "a", "count": 1}]},
+ )
+
+ def test_redact_relation_edit(self) -> None:
+ """Test that edits of an event are redacted when the original event
+ is redacted.
+ """
+ # Add a relation
+ channel = self._send_relation(
+ RelationTypes.REPLACE,
+ "m.room.message",
+ parent_id=self.parent_id,
+ content={
+ "msgtype": "m.text",
+ "body": "Wibble",
+ "m.new_content": {"msgtype": "m.text", "body": "First edit"},
+ },
+ )
+ self.assertEqual(200, channel.code, channel.json_body)
+
+ # Check the relation is returned
+ channel = self.make_request(
+ "GET",
+ f"/_matrix/client/unstable/rooms/{self.room}/relations"
+ f"/{self.parent_id}/m.replace/m.room.message",
+ access_token=self.user_token,
+ )
+ self.assertEqual(200, channel.code, channel.json_body)
+
+ self.assertIn("chunk", channel.json_body)
+ self.assertEqual(len(channel.json_body["chunk"]), 1)
+
+ # Redact the original event
+ self._redact(self.parent_id)
+
+ # Try to check for remaining m.replace relations
+ channel = self.make_request(
+ "GET",
+ f"/_matrix/client/unstable/rooms/{self.room}/relations"
+ f"/{self.parent_id}/m.replace/m.room.message",
+ access_token=self.user_token,
+ )
+ self.assertEqual(200, channel.code, channel.json_body)
+
+ # Check that no relations are returned
+ self.assertIn("chunk", channel.json_body)
+ self.assertEqual(channel.json_body["chunk"], [])
+
+ def test_redact_parent(self) -> None:
+ """Test that annotations of an event are redacted when the original event
+ is redacted.
+ """
+ # Add a relation
+ channel = self._send_relation(RelationTypes.ANNOTATION, "m.reaction", key="👍")
+ self.assertEqual(200, channel.code, channel.json_body)
+
+ # Redact the original event.
+ self._redact(self.parent_id)
+
+ # Check that aggregations returns zero
+ channel = self.make_request(
+ "GET",
+ f"/_matrix/client/unstable/rooms/{self.room}/aggregations/{self.parent_id}/m.annotation/m.reaction",
+ access_token=self.user_token,
+ )
+ self.assertEqual(200, channel.code, channel.json_body)
+
+ self.assertIn("chunk", channel.json_body)
+ self.assertEqual(channel.json_body["chunk"], [])
diff --git a/tox.ini b/tox.ini
index 04b972e2c5..8d6aa7580b 100644
--- a/tox.ini
+++ b/tox.ini
@@ -38,15 +38,7 @@ lint_targets =
setup.py
synapse
tests
- scripts
# annoyingly, black doesn't find these so we have to list them
- scripts/export_signing_key
- scripts/generate_config
- scripts/generate_log_config
- scripts/hash_password
- scripts/register_new_matrix_user
- scripts/synapse_port_db
- scripts/update_synapse_database
scripts-dev
scripts-dev/build_debian_packages
scripts-dev/sign_json
|