Diffstat (limited to 'scripts-dev')
-rwxr-xr-x  scripts-dev/build_debian_packages     105
-rwxr-xr-x  scripts-dev/complement.sh              25
-rw-r--r--  scripts-dev/convert_server_keys.py      7
-rwxr-xr-x  scripts-dev/lint.sh                    18
4 files changed, 105 insertions, 50 deletions
diff --git a/scripts-dev/build_debian_packages b/scripts-dev/build_debian_packages
index 07d018db99..546724f89f 100755
--- a/scripts-dev/build_debian_packages
+++ b/scripts-dev/build_debian_packages
@@ -21,18 +21,18 @@ DISTS = (
     "debian:buster",
     "debian:bullseye",
     "debian:sid",
-    "ubuntu:bionic",   # 18.04 LTS (our EOL forced by Py36 on 2021-12-23)
-    "ubuntu:focal",    # 20.04 LTS (our EOL forced by Py38 on 2024-10-14)
-    "ubuntu:groovy",   # 20.10 (EOL 2021-07-07)
+    "ubuntu:bionic",  # 18.04 LTS (our EOL forced by Py36 on 2021-12-23)
+    "ubuntu:focal",  # 20.04 LTS (our EOL forced by Py38 on 2024-10-14)
+    "ubuntu:groovy",  # 20.10 (EOL 2021-07-07)
     "ubuntu:hirsute",  # 21.04 (EOL 2022-01-05)
 )
 
-DESC = '''\
+DESC = """\
 Builds .debs for synapse, using a Docker image for the build environment.
 
 By default, builds for all known distributions, but a list of distributions
 can be passed on the commandline for debugging.
-'''
+"""
 
 
 class Builder(object):
@@ -46,7 +46,7 @@ class Builder(object):
         """Build deb for a single distribution"""
 
         if self._failed:
-            print("not building %s due to earlier failure" % (dist, ))
+            print("not building %s due to earlier failure" % (dist,))
             raise Exception("failed")
 
         try:
@@ -68,48 +68,65 @@ class Builder(object):
         # we tend to get source packages which are full of debs. (We could hack
         # around that with more magic in the build_debian.sh script, but that
         # doesn't solve the problem for natively-run dpkg-buildpakage).
-        debsdir = os.path.join(projdir, '../debs')
+        debsdir = os.path.join(projdir, "../debs")
         os.makedirs(debsdir, exist_ok=True)
 
         if self.redirect_stdout:
-            logfile = os.path.join(debsdir, "%s.buildlog" % (tag, ))
+            logfile = os.path.join(debsdir, "%s.buildlog" % (tag,))
             print("building %s: directing output to %s" % (dist, logfile))
             stdout = open(logfile, "w")
         else:
             stdout = None
 
         # first build a docker image for the build environment
-        subprocess.check_call([
-            "docker", "build",
-            "--tag", "dh-venv-builder:" + tag,
-            "--build-arg", "distro=" + dist,
-            "-f", "docker/Dockerfile-dhvirtualenv",
-            "docker",
-        ], stdout=stdout, stderr=subprocess.STDOUT)
+        subprocess.check_call(
+            [
+                "docker",
+                "build",
+                "--tag",
+                "dh-venv-builder:" + tag,
+                "--build-arg",
+                "distro=" + dist,
+                "-f",
+                "docker/Dockerfile-dhvirtualenv",
+                "docker",
+            ],
+            stdout=stdout,
+            stderr=subprocess.STDOUT,
+        )
 
         container_name = "synapse_build_" + tag
         with self._lock:
             self.active_containers.add(container_name)
 
         # then run the build itself
-        subprocess.check_call([
-            "docker", "run",
-            "--rm",
-            "--name", container_name,
-            "--volume=" + projdir + ":/synapse/source:ro",
-            "--volume=" + debsdir + ":/debs",
-            "-e", "TARGET_USERID=%i" % (os.getuid(), ),
-            "-e", "TARGET_GROUPID=%i" % (os.getgid(), ),
-            "-e", "DEB_BUILD_OPTIONS=%s" % ("nocheck" if skip_tests else ""),
-            "dh-venv-builder:" + tag,
-        ], stdout=stdout, stderr=subprocess.STDOUT)
+        subprocess.check_call(
+            [
+                "docker",
+                "run",
+                "--rm",
+                "--name",
+                container_name,
+                "--volume=" + projdir + ":/synapse/source:ro",
+                "--volume=" + debsdir + ":/debs",
+                "-e",
+                "TARGET_USERID=%i" % (os.getuid(),),
+                "-e",
+                "TARGET_GROUPID=%i" % (os.getgid(),),
+                "-e",
+                "DEB_BUILD_OPTIONS=%s" % ("nocheck" if skip_tests else ""),
+                "dh-venv-builder:" + tag,
+            ],
+            stdout=stdout,
+            stderr=subprocess.STDOUT,
+        )
 
         with self._lock:
             self.active_containers.remove(container_name)
 
         if stdout is not None:
             stdout.close()
-        print("Completed build of %s" % (dist, ))
+        print("Completed build of %s" % (dist,))
 
     def kill_containers(self):
         with self._lock:
@@ -117,9 +134,14 @@ class Builder(object):
 
         for c in active:
             print("killing container %s" % (c,))
-            subprocess.run([
-                "docker", "kill", c,
-            ], stdout=subprocess.DEVNULL)
+            subprocess.run(
+                [
+                    "docker",
+                    "kill",
+                    c,
+                ],
+                stdout=subprocess.DEVNULL,
+            )
             with self._lock:
                 self.active_containers.remove(c)
 
@@ -130,31 +152,38 @@ def run_builds(dists, jobs=1, skip_tests=False):
     def sig(signum, _frame):
         print("Caught SIGINT")
         builder.kill_containers()
+
     signal.signal(signal.SIGINT, sig)
 
     with ThreadPoolExecutor(max_workers=jobs) as e:
         res = e.map(lambda dist: builder.run_build(dist, skip_tests), dists)
 
         # make sure we consume the iterable so that exceptions are raised.
-        for r in res:
+        for _ in res:
             pass
 
 
-if __name__ == '__main__':
+if __name__ == "__main__":
     parser = argparse.ArgumentParser(
         description=DESC,
     )
     parser.add_argument(
-        '-j', '--jobs', type=int, default=1,
-        help='specify the number of builds to run in parallel',
+        "-j",
+        "--jobs",
+        type=int,
+        default=1,
+        help="specify the number of builds to run in parallel",
     )
     parser.add_argument(
-        '--no-check', action='store_true',
-        help='skip running tests after building',
+        "--no-check",
+        action="store_true",
+        help="skip running tests after building",
     )
     parser.add_argument(
-        'dist', nargs='*', default=DISTS,
-        help='a list of distributions to build for. Default: %(default)s',
+        "dist",
+        nargs="*",
+        default=DISTS,
+        help="a list of distributions to build for. Default: %(default)s",
    )
     args = parser.parse_args()
     run_builds(dists=args.dist, jobs=args.jobs, skip_tests=args.no_check)
diff --git a/scripts-dev/complement.sh b/scripts-dev/complement.sh
index 1612ab522c..0043964673 100755
--- a/scripts-dev/complement.sh
+++ b/scripts-dev/complement.sh
@@ -10,6 +10,9 @@
 # checkout by setting the COMPLEMENT_DIR environment variable to the
 # filepath of a local Complement checkout.
 #
+# By default Synapse is run in monolith mode. This can be overridden by
+# setting the WORKERS environment variable.
+#
 # A regular expression of test method names can be supplied as the first
 # argument to the script. Complement will then only run those tests. If
 # no regex is supplied, all tests are run. For example;
@@ -32,10 +35,26 @@ if [[ -z "$COMPLEMENT_DIR" ]]; then
   echo "Checkout available at 'complement-master'"
 fi
 
+# If we're using workers, modify the docker files slightly.
+if [[ -n "$WORKERS" ]]; then
+  BASE_IMAGE=matrixdotorg/synapse-workers
+  BASE_DOCKERFILE=docker/Dockerfile-workers
+  export COMPLEMENT_BASE_IMAGE=complement-synapse-workers
+  COMPLEMENT_DOCKERFILE=SynapseWorkers.Dockerfile
+  # And provide some more configuration to complement.
+  export COMPLEMENT_CA=true
+  export COMPLEMENT_VERSION_CHECK_ITERATIONS=500
+else
+  BASE_IMAGE=matrixdotorg/synapse
+  BASE_DOCKERFILE=docker/Dockerfile
+  export COMPLEMENT_BASE_IMAGE=complement-synapse
+  COMPLEMENT_DOCKERFILE=Synapse.Dockerfile
+fi
+
 # Build the base Synapse image from the local checkout
-docker build -t matrixdotorg/synapse -f docker/Dockerfile .
+docker build -t $BASE_IMAGE -f "$BASE_DOCKERFILE" .
 
 # Build the Synapse monolith image from Complement, based on the above image we just built
-docker build -t complement-synapse -f "$COMPLEMENT_DIR/dockerfiles/Synapse.Dockerfile" "$COMPLEMENT_DIR/dockerfiles"
+docker build -t $COMPLEMENT_BASE_IMAGE -f "$COMPLEMENT_DIR/dockerfiles/$COMPLEMENT_DOCKERFILE" "$COMPLEMENT_DIR/dockerfiles"
 
 cd "$COMPLEMENT_DIR"
@@ -46,4 +65,4 @@ if [[ -n "$1" ]]; then
 fi
 
 # Run the tests!
-COMPLEMENT_BASE_IMAGE=complement-synapse go test -v -tags synapse_blacklist,msc2946,msc3083 -count=1 $EXTRA_COMPLEMENT_ARGS ./tests
+go test -v -tags synapse_blacklist,msc2946,msc3083 -count=1 $EXTRA_COMPLEMENT_ARGS ./tests
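Note: as an illustration of the options documented in the comments above, a hypothetical invocation of the updated script might look like the following (assuming it is run from the Synapse repository root with a Complement checkout at ../complement; the test-name regex is only a placeholder):

    COMPLEMENT_DIR=../complement WORKERS=1 ./scripts-dev/complement.sh "TestOutboundFederation"

Setting WORKERS to any non-empty value selects the worker-based image and Dockerfile added above; leaving it unset keeps the original monolith build, and the optional regex restricts which Complement tests are run.
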
diff --git a/scripts-dev/convert_server_keys.py b/scripts-dev/convert_server_keys.py
index 961dc59f11..d4314a054c 100644
--- a/scripts-dev/convert_server_keys.py
+++ b/scripts-dev/convert_server_keys.py
@@ -1,4 +1,3 @@
-import hashlib
 import json
 import sys
 import time
@@ -54,15 +53,9 @@ def convert_v1_to_v2(server_name, valid_until, keys, certificate):
         "server_name": server_name,
         "verify_keys": {key_id: {"key": key} for key_id, key in keys.items()},
         "valid_until_ts": valid_until,
-        "tls_fingerprints": [fingerprint(certificate)],
     }
 
 
-def fingerprint(certificate):
-    finger = hashlib.sha256(certificate)
-    return {"sha256": encode_base64(finger.digest())}
-
-
 def rows_v2(server, json):
     valid_until = json["valid_until_ts"]
     key_json = encode_canonical_json(json)
diff --git a/scripts-dev/lint.sh b/scripts-dev/lint.sh
index 9761e97594..869eb2372d 100755
--- a/scripts-dev/lint.sh
+++ b/scripts-dev/lint.sh
@@ -80,8 +80,22 @@ else
   # then lint everything!
   if [[ -z ${files+x} ]]; then
     # Lint all source code files and directories
-    # Note: this list aims the mirror the one in tox.ini
-    files=("synapse" "docker" "tests" "scripts-dev" "scripts" "contrib" "synctl" "setup.py" "synmark" "stubs" ".buildkite")
+    # Note: this list aims to mirror the one in tox.ini
+    files=(
+        "synapse" "docker" "tests"
+        # annoyingly, black doesn't find these so we have to list them
+        "scripts/export_signing_key"
+        "scripts/generate_config"
+        "scripts/generate_log_config"
+        "scripts/hash_password"
+        "scripts/register_new_matrix_user"
+        "scripts/synapse_port_db"
+        "scripts-dev"
+        "scripts-dev/build_debian_packages"
+        "scripts-dev/sign_json"
+        "scripts-dev/update_database"
+        "contrib" "synctl" "setup.py" "synmark" "stubs" ".buildkite"
+    )
   fi
 fi
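As a usage sketch for the expanded file list above (this relies on an assumption about the unchanged parts of lint.sh, namely that explicit paths can also be passed as arguments): running the script with no arguments lints everything in the list, while naming paths restricts the run, e.g.

    ./scripts-dev/lint.sh
    ./scripts-dev/lint.sh scripts-dev/build_debian_packages scripts-dev/sign_json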