diff --git a/.ci/scripts/auditwheel_wrapper.py b/.ci/scripts/auditwheel_wrapper.py
new file mode 100755
index 0000000000..a33b39314f
--- /dev/null
+++ b/.ci/scripts/auditwheel_wrapper.py
@@ -0,0 +1,132 @@
+#!/usr/bin/env python
+# Copyright 2022 The Matrix.org Foundation C.I.C.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Wraps `auditwheel repair` to first check whether we're repairing a potentially
+# abi3-compatible wheel and, if so, rename the wheel before repairing it.
+
+import argparse
+import os
+import subprocess
+from typing import Optional
+from zipfile import ZipFile
+
+from packaging.tags import Tag
+from packaging.utils import parse_wheel_filename
+from packaging.version import Version
+
+
+def check_is_abi3_compatible(wheel_file: str) -> None:
+ """Check the contents of the built wheel for any `.so` files that are *not*
+ abi3 compatible.
+ """
+
+ with ZipFile(wheel_file, "r") as wheel:
+ for file in wheel.namelist():
+ if not file.endswith(".so"):
+ continue
+
+ if not file.endswith(".abi3.so"):
+ raise Exception(f"Found non-abi3 lib: {file}")
+
+
+def cpython(wheel_file: str, name: str, version: Version, tag: Tag) -> str:
+ """Replaces the cpython wheel file with a ABI3 compatible wheel"""
+
+ if tag.abi == "abi3":
+ # Nothing to do.
+ return wheel_file
+
+ check_is_abi3_compatible(wheel_file)
+
+ abi3_tag = Tag(tag.interpreter, "abi3", tag.platform)
+
+ dirname = os.path.dirname(wheel_file)
+ new_wheel_file = os.path.join(
+ dirname,
+ f"{name}-{version}-{abi3_tag}.whl",
+ )
+
+ os.rename(wheel_file, new_wheel_file)
+
+ print("Renamed wheel to", new_wheel_file)
+
+ return new_wheel_file
+
+
+def main(wheel_file: str, dest_dir: str, archs: Optional[str]) -> None:
+ """Entry point"""
+
+ # Parse the wheel file name into its parts. Note that `parse_wheel_filename`
+ # normalizes the package name (i.e. it converts matrix_synapse ->
+ # matrix-synapse), which is not what we want.
+ _, version, build, tags = parse_wheel_filename(os.path.basename(wheel_file))
+ name = os.path.basename(wheel_file).split("-")[0]
+
+ if len(tags) != 1:
+        # We expect a wheel file with only a single tag
+ raise Exception(f"Unexpectedly found multiple tags: {tags}")
+
+ tag = next(iter(tags))
+
+ if build:
+ # We don't use build tags in Synapse
+ raise Exception(f"Unexpected build tag: {build}")
+
+ # If the wheel is for cpython then convert it into an abi3 wheel.
+ if tag.interpreter.startswith("cp"):
+ wheel_file = cpython(wheel_file, name, version, tag)
+
+ # Finally, repair the wheel.
+ if archs is not None:
+        # If we are given archs then we are on macOS and need to repair with
+        # `delocate-wheel` (listing dependencies first with `delocate-listdeps`).
+ subprocess.run(["delocate-listdeps", wheel_file], check=True)
+ subprocess.run(
+ ["delocate-wheel", "--require-archs", archs, "-w", dest_dir, wheel_file],
+ check=True,
+ )
+ else:
+ subprocess.run(["auditwheel", "repair", "-w", dest_dir, wheel_file], check=True)
+
+
+if __name__ == "__main__":
+ parser = argparse.ArgumentParser(description="Tag wheel as abi3 and repair it.")
+
+ parser.add_argument(
+ "--wheel-dir",
+ "-w",
+ metavar="WHEEL_DIR",
+ help="Directory to store delocated wheels",
+ required=True,
+ )
+
+ parser.add_argument(
+ "--require-archs",
+ metavar="archs",
+ default=None,
+ )
+
+ parser.add_argument(
+ "wheel_file",
+ metavar="WHEEL_FILE",
+ )
+
+ args = parser.parse_args()
+
+ wheel_file = args.wheel_file
+ wheel_dir = args.wheel_dir
+ archs = args.require_archs
+
+ main(wheel_file, wheel_dir, archs)
diff --git a/.ci/scripts/calculate_jobs.py b/.ci/scripts/calculate_jobs.py
new file mode 100755
index 0000000000..0cdc20e19c
--- /dev/null
+++ b/.ci/scripts/calculate_jobs.py
@@ -0,0 +1,135 @@
+#!/usr/bin/env python
+# Copyright 2022 The Matrix.org Foundation C.I.C.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Calculate the trial jobs to run based on if we're in a PR or not.
+
+import json
+import os
+
+
+def set_output(key: str, value: str):
+ # See https://docs.github.com/en/actions/using-workflows/workflow-commands-for-github-actions#setting-an-output-parameter
+ with open(os.environ["GITHUB_OUTPUT"], "at") as f:
+ print(f"{key}={value}", file=f)
+
+
+IS_PR = os.environ["GITHUB_REF"].startswith("refs/pull/")
+
+# First calculate the various trial jobs.
+#
+# For PRs, each type of test is only run on Python 3.7.
+
+trial_sqlite_tests = [
+ {
+ "python-version": "3.7",
+ "database": "sqlite",
+ "extras": "all",
+ }
+]
+
+if not IS_PR:
+ trial_sqlite_tests.extend(
+ {
+ "python-version": version,
+ "database": "sqlite",
+ "extras": "all",
+ }
+ for version in ("3.8", "3.9", "3.10", "3.11")
+ )
+
+
+trial_postgres_tests = [
+ {
+ "python-version": "3.7",
+ "database": "postgres",
+ "postgres-version": "11",
+ "extras": "all",
+ }
+]
+
+if not IS_PR:
+ trial_postgres_tests.append(
+ {
+ "python-version": "3.11",
+ "database": "postgres",
+ "postgres-version": "15",
+ "extras": "all",
+ }
+ )
+
+trial_no_extra_tests = [
+ {
+ "python-version": "3.7",
+ "database": "sqlite",
+ "extras": "",
+ }
+]
+
+print("::group::Calculated trial jobs")
+print(
+ json.dumps(
+ trial_sqlite_tests + trial_postgres_tests + trial_no_extra_tests, indent=4
+ )
+)
+print("::endgroup::")
+
+test_matrix = json.dumps(
+ trial_sqlite_tests + trial_postgres_tests + trial_no_extra_tests
+)
+set_output("trial_test_matrix", test_matrix)
+
+
+# Next, calculate the various sytest jobs.
+#
+# For PRs, each type of test is only run on focal.
+
+
+sytest_tests = [
+ {
+ "sytest-tag": "focal",
+ },
+ {
+ "sytest-tag": "focal",
+ "postgres": "postgres",
+ },
+ {
+ "sytest-tag": "focal",
+ "postgres": "multi-postgres",
+ "workers": "workers",
+ },
+]
+
+if not IS_PR:
+ sytest_tests.extend(
+ [
+ {
+ "sytest-tag": "testing",
+ "postgres": "postgres",
+ },
+ {
+ "sytest-tag": "buster",
+ "postgres": "multi-postgres",
+ "workers": "workers",
+ },
+ ]
+ )
+
+
+print("::group::Calculated sytest jobs")
+print(json.dumps(sytest_tests, indent=4))
+print("::endgroup::")
+
+test_matrix = json.dumps(sytest_tests)
+set_output("sytest_test_matrix", test_matrix)
diff --git a/.ci/scripts/gotestfmt b/.ci/scripts/gotestfmt
new file mode 100755
index 0000000000..83e0ec6361
--- /dev/null
+++ b/.ci/scripts/gotestfmt
@@ -0,0 +1,21 @@
+#!/bin/bash
+#
+# wraps `gotestfmt`, hiding output from successful packages unless
+# all tests passed.
+
+set -o pipefail
+set -e
+
+# tee the test results to a log, whilst also piping them into gotestfmt,
+# telling it to hide successful results, so that we can clearly see
+# unsuccessful results.
+tee complement.log | gotestfmt -hide successful-packages
+
+# gotestfmt will exit non-zero if there were any failures, so if we got to this
+# point, we must have had a successful result.
+echo "All tests successful; showing all test results"
+
+# Pipe the test results back through gotestfmt, showing all results.
+# The log file consists of JSON lines giving the test results, interspersed
+# with regular stdout lines (including reports of downloaded packages).
+grep '^{"Time":' complement.log | gotestfmt
diff --git a/.ci/scripts/postgres_exec.py b/.ci/scripts/postgres_exec.py
deleted file mode 100755
index 0f39a336d5..0000000000
--- a/.ci/scripts/postgres_exec.py
+++ /dev/null
@@ -1,31 +0,0 @@
-#!/usr/bin/env python
-# Copyright 2019 The Matrix.org Foundation C.I.C.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import sys
-
-import psycopg2
-
-# a very simple replacment for `psql`, to make up for the lack of the postgres client
-# libraries in the synapse docker image.
-
-# We use "postgres" as a database because it's bound to exist and the "synapse" one
-# doesn't exist yet.
-db_conn = psycopg2.connect(
- user="postgres", host="localhost", password="postgres", dbname="postgres"
-)
-db_conn.autocommit = True
-cur = db_conn.cursor()
-for c in sys.argv[1:]:
- cur.execute(c)
diff --git a/.ci/scripts/test_old_deps.sh b/.ci/scripts/prepare_old_deps.sh
index 478c8d639a..7e4f060b17 100755
--- a/.ci/scripts/test_old_deps.sh
+++ b/.ci/scripts/prepare_old_deps.sh
@@ -5,18 +5,8 @@
# - creates a venv with these old versions using poetry; and finally
# - invokes `trial` to run the tests with old deps.
-# Prevent tzdata from asking for user input
-export DEBIAN_FRONTEND=noninteractive
-
set -ex
-apt-get update
-apt-get install -y \
- python3 python3-dev python3-pip python3-venv pipx \
- libxml2-dev libxslt-dev xmlsec1 zlib1g-dev libjpeg-dev libwebp-dev
-
-export LANG="C.UTF-8"
-
# Prevent virtualenv from auto-updating pip to an incompatible version
export VIRTUALENV_NO_DOWNLOAD=1
@@ -33,12 +23,6 @@ export VIRTUALENV_NO_DOWNLOAD=1
# a `cryptography` compiled against OpenSSL 1.1.
# - Omit systemd: we're not logging to journal here.
-# TODO: also replace caret bounds, see https://python-poetry.org/docs/dependency-specification/#version-constraints
-# We don't use these yet, but IIRC they are the default bound used when you `poetry add`.
-# The sed expression 's/\^/==/g' ought to do the trick. But it would also change
-# `python = "^3.7"` to `python = "==3.7", which would mean we fail because olddeps
-# runs on 3.8 (#12343).
-
sed -i \
-e "s/[~>]=/==/g" \
-e '/^python = "^/!s/\^/==/g' \
@@ -55,7 +39,7 @@ sed -i \
# toml file. This means we don't have to ensure compatibility between old deps and
# dev tools.
-pip install --user toml
+pip install toml wheel
REMOVE_DEV_DEPENDENCIES="
import toml
@@ -69,8 +53,8 @@ with open('pyproject.toml', 'w') as f:
"
python3 -c "$REMOVE_DEV_DEPENDENCIES"
-pipx install poetry==1.1.14
-~/.local/bin/poetry lock
+pip install poetry==1.2.0
+poetry lock
echo "::group::Patched pyproject.toml"
cat pyproject.toml
@@ -78,6 +62,3 @@ echo "::endgroup::"
echo "::group::Lockfile after patch"
cat poetry.lock
echo "::endgroup::"
-
-~/.local/bin/poetry install -E "all test"
-~/.local/bin/poetry run trial --jobs=2 tests
diff --git a/.ci/scripts/setup_complement_prerequisites.sh b/.ci/scripts/setup_complement_prerequisites.sh
index 4848901cbf..42ef654167 100755
--- a/.ci/scripts/setup_complement_prerequisites.sh
+++ b/.ci/scripts/setup_complement_prerequisites.sh
@@ -21,7 +21,7 @@ endblock
block Install Complement Dependencies
sudo apt-get -qq update && sudo apt-get install -qqy libolm3 libolm-dev
- go get -v github.com/haveyoudebuggedit/gotestfmt/v2/cmd/gotestfmt@latest
+ go get -v github.com/gotesttools/gotestfmt/v2/cmd/gotestfmt@latest
endblock
block Install custom gotestfmt template
diff --git a/.ci/scripts/test_export_data_command.sh b/.ci/scripts/test_export_data_command.sh
index 033fd3e24e..9f6c49acff 100755
--- a/.ci/scripts/test_export_data_command.sh
+++ b/.ci/scripts/test_export_data_command.sh
@@ -32,7 +32,7 @@ else
fi
# Create the PostgreSQL database.
-poetry run .ci/scripts/postgres_exec.py "CREATE DATABASE synapse"
+psql -c "CREATE DATABASE synapse"
# Port the SQLite databse to postgres so we can check command works against postgres
echo "+++ Port SQLite3 databse to postgres"
diff --git a/.ci/scripts/test_synapse_port_db.sh b/.ci/scripts/test_synapse_port_db.sh
index b07a6b5d08..8cc41d3dca 100755
--- a/.ci/scripts/test_synapse_port_db.sh
+++ b/.ci/scripts/test_synapse_port_db.sh
@@ -2,27 +2,27 @@
#
# Test script for 'synapse_port_db'.
# - configures synapse and a postgres server.
-# - runs the port script on a prepopulated test sqlite db
-# - also runs it against an new sqlite db
+# - runs the port script on a prepopulated test sqlite db. Checks that the
+# return code is zero.
+# - reruns the port script on the same sqlite db, targeting the same postgres db.
+# Checks that the return code is zero.
+# - runs the port script against a new sqlite db. Checks the return code is zero.
#
# Expects Synapse to have been already installed with `poetry install --extras postgres`.
# Expects `poetry` to be available on the `PATH`.
-set -xe
+set -xe -o pipefail
cd "$(dirname "$0")/../.."
echo "--- Generate the signing key"
-
-# Generate the server's signing key.
poetry run synapse_homeserver --generate-keys -c .ci/sqlite-config.yaml
echo "--- Prepare test database"
-
-# Make sure the SQLite3 database is using the latest schema and has no pending background update.
+# Make sure the SQLite3 database is using the latest schema and has no pending background updates.
poetry run update_synapse_database --database-config .ci/sqlite-config.yaml --run-background-updates
# Create the PostgreSQL database.
-poetry run .ci/scripts/postgres_exec.py "CREATE DATABASE synapse"
+psql -c "CREATE DATABASE synapse"
echo "+++ Run synapse_port_db against test database"
# TODO: this invocation of synapse_port_db (and others below) used to be prepended with `coverage run`,
@@ -45,9 +45,23 @@ rm .ci/test_db.db
poetry run update_synapse_database --database-config .ci/sqlite-config.yaml --run-background-updates
# re-create the PostgreSQL database.
-poetry run .ci/scripts/postgres_exec.py \
- "DROP DATABASE synapse" \
- "CREATE DATABASE synapse"
+psql \
+ -c "DROP DATABASE synapse" \
+ -c "CREATE DATABASE synapse"
echo "+++ Run synapse_port_db against empty database"
poetry run synapse_port_db --sqlite-database .ci/test_db.db --postgres-config .ci/postgres-config.yaml
+
+echo "--- Create a brand new postgres database from schema"
+cp .ci/postgres-config.yaml .ci/postgres-config-unported.yaml
+sed -i -e 's/database: synapse/database: synapse_unported/' .ci/postgres-config-unported.yaml
+psql -c "CREATE DATABASE synapse_unported"
+poetry run update_synapse_database --database-config .ci/postgres-config-unported.yaml --run-background-updates
+
+echo "+++ Comparing ported schema with unported schema"
+# Ignore the tables that portdb creates. (Should it tidy them up when the porting is completed?)
+psql synapse -c "DROP TABLE port_from_sqlite3;"
+pg_dump --format=plain --schema-only --no-tablespaces --no-acl --no-owner synapse_unported > unported.sql
+pg_dump --format=plain --schema-only --no-tablespaces --no-acl --no-owner synapse > ported.sql
+# By default, `diff` returns zero if there are no changes and nonzero otherwise
+diff -u unported.sql ported.sql | tee schema_diff
\ No newline at end of file
|