diff --git a/scripts-dev/check_schema_delta.py b/scripts-dev/check_schema_delta.py
new file mode 100755
index 0000000000..32fe7f50de
--- /dev/null
+++ b/scripts-dev/check_schema_delta.py
@@ -0,0 +1,111 @@
+#!/usr/bin/env python3
+
+# Check that no schema deltas have been added to the wrong version.
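+#
+# Intended to be run from the root of a Synapse checkout, e.g.:
+#
+#   ./scripts-dev/check_schema_delta.py --force-colors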
+
+import re
+from typing import Any, Dict, List, Optional
+
+import click
+import git
+
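+# Matches paths such as "synapse/storage/schema/main/delta/72/01foo.sql";
+# the second capture group ("72") is the delta's schema version.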
+SCHEMA_FILE_REGEX = re.compile(r"^synapse/storage/schema/(.*)/delta/(.*)/(.*)$")
+
+
+@click.command()
+@click.option(
+ "--force-colors",
+ is_flag=True,
+ flag_value=True,
+ default=None,
+ help="Always output ANSI colours",
+)
+def main(force_colors: Optional[bool]) -> None:
+ click.secho(
+ "+++ Checking schema deltas are in the right folder",
+ fg="green",
+ bold=True,
+ color=force_colors,
+ )
+
+ click.secho("Updating repo...")
+
+ repo = git.Repo()
+ repo.remote().fetch()
+
+ click.secho("Getting current schema version...")
+
+ r = repo.git.show("origin/develop:synapse/storage/schema/__init__.py")
+
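+    # Exec the fetched __init__.py so we can read its SCHEMA_VERSION without
+    # importing synapse itself.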
+    locals_: Dict[str, Any] = {}
+    exec(r, locals_)
+    current_schema_version = locals_["SCHEMA_VERSION"]
+
+ click.secho(f"Current schema version: {current_schema_version}")
+
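+    # Diff the tip of origin/develop against the working tree to find files
+    # added since develop.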
+ diffs: List[git.Diff] = repo.remote().refs.develop.commit.diff(None)
+
+ seen_deltas = False
+ bad_files = []
+ for diff in diffs:
+ if not diff.new_file or diff.b_path is None:
+ continue
+
+ match = SCHEMA_FILE_REGEX.match(diff.b_path)
+ if not match:
+ continue
+
+ seen_deltas = True
+
+ _, delta_version, _ = match.groups()
+
+ if delta_version != str(current_schema_version):
+ bad_files.append(diff.b_path)
+
+ if not seen_deltas:
+ click.secho(
+ "No deltas found.",
+ fg="green",
+ bold=True,
+ color=force_colors,
+ )
+ return
+
+ if not bad_files:
+ click.secho(
+ f"All deltas are in the correct folder: {current_schema_version}!",
+ fg="green",
+ bold=True,
+ color=force_colors,
+ )
+ return
+
+ bad_files.sort()
+
+ click.secho(
+ "Found deltas in the wrong folder!",
+ fg="red",
+ bold=True,
+ color=force_colors,
+ )
+
+ for f in bad_files:
+ click.secho(
+ f"\t{f}",
+ fg="red",
+ bold=True,
+ color=force_colors,
+ )
+
+ click.secho()
+ click.secho(
+ f"Please move these files to delta/{current_schema_version}/",
+ fg="red",
+ bold=True,
+ color=force_colors,
+ )
+
+ click.get_current_context().exit(1)
+
+
+if __name__ == "__main__":
+ main()
diff --git a/scripts-dev/complement.sh b/scripts-dev/complement.sh
index 3c472c576e..f1843717cb 100755
--- a/scripts-dev/complement.sh
+++ b/scripts-dev/complement.sh
@@ -18,12 +18,21 @@
# argument to the script. Complement will then only run those tests. If
# no regex is supplied, all tests are run. For example:
#
-# ./complement.sh "TestOutboundFederation(Profile|Send)"
+# ./complement.sh -run "TestOutboundFederation(Profile|Send)"
#
# Exit if a line returns a non-zero exit code
set -e
+
+# Helper to emit annotations that collapse portions of the log in GitHub Actions
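+# (GITHUB_WORKFLOW is only set on GitHub Actions runners, so this prints
+# nothing when run locally.)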
+echo_if_github() {
+    if [[ -n "$GITHUB_WORKFLOW" ]]; then
+        echo "$@"
+    fi
+}
+
+
# enable buildkit for the docker builds
export DOCKER_BUILDKIT=1
@@ -41,19 +50,40 @@ if [[ -z "$COMPLEMENT_DIR" ]]; then
fi
# Build the base Synapse image from the local checkout
+echo_if_github "::group::Build Docker image: matrixdotorg/synapse"
docker build -t matrixdotorg/synapse -f "docker/Dockerfile" .
+echo_if_github "::endgroup::"
+
+# Build the workers docker image (from the base Synapse image we just built).
+echo_if_github "::group::Build Docker image: matrixdotorg/synapse-workers"
+docker build -t matrixdotorg/synapse-workers -f "docker/Dockerfile-workers" .
+echo_if_github "::endgroup::"
+
+# Build the unified Complement image (from the worker Synapse image we just built).
+echo_if_github "::group::Build Docker image: complement/Dockerfile"
+docker build -t complement-synapse \
+ -f "docker/complement/Dockerfile" "docker/complement"
+echo_if_github "::endgroup::"
+
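+# Complement starts each homeserver under test from this image.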
+export COMPLEMENT_BASE_IMAGE=complement-synapse
extra_test_args=()
test_tags="synapse_blacklist,msc2716,msc3030,msc3787"
-# If we're using workers, modify the docker files slightly.
+# All environment variables starting with PASS_ will be shared with the
+# Complement containers (the prefix is stripped off before reaching them).
+export COMPLEMENT_SHARE_ENV_PREFIX=PASS_
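+# e.g. PASS_SYNAPSE_COMPLEMENT_DATABASE is seen as SYNAPSE_COMPLEMENT_DATABASE
+# inside the containers.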
+
+# It takes longer than 10m to run the whole suite.
+extra_test_args+=("-timeout=60m")
+
if [[ -n "$WORKERS" ]]; then
- # Build the workers docker image (from the base Synapse image).
- docker build -t matrixdotorg/synapse-workers -f "docker/Dockerfile-workers" .
+ # Use workers.
+ export PASS_SYNAPSE_COMPLEMENT_USE_WORKERS=true
- export COMPLEMENT_BASE_IMAGE=complement-synapse-workers
- COMPLEMENT_DOCKERFILE=SynapseWorkers.Dockerfile
+ # Workers can only use Postgres as a database.
+ export PASS_SYNAPSE_COMPLEMENT_DATABASE=postgres
# And provide some more configuration to complement.
@@ -61,21 +91,19 @@ if [[ -n "$WORKERS" ]]; then
# time (the main problem is that we start 14 python processes for each test,
# and complement likes to do two of them in parallel).
export COMPLEMENT_SPAWN_HS_TIMEOUT_SECS=120
-
- # ... and it takes longer than 10m to run the whole suite.
- extra_test_args+=("-timeout=60m")
else
- export COMPLEMENT_BASE_IMAGE=complement-synapse
- COMPLEMENT_DOCKERFILE=Dockerfile
+ export PASS_SYNAPSE_COMPLEMENT_USE_WORKERS=
+ if [[ -n "$POSTGRES" ]]; then
+ export PASS_SYNAPSE_COMPLEMENT_DATABASE=postgres
+ else
+ export PASS_SYNAPSE_COMPLEMENT_DATABASE=sqlite
+ fi
# We only test faster room joins on monoliths, because they are purposefully
# being developed without worker support to start with.
test_tags="$test_tags,faster_joins"
fi
-# Build the Complement image from the Synapse image we just built.
-docker build -t $COMPLEMENT_BASE_IMAGE -f "docker/complement/$COMPLEMENT_DOCKERFILE" "docker/complement"
-
# Run the tests!
echo "Images built; running complement"
cd "$COMPLEMENT_DIR"
|