Diffstat (limited to 'scripts-dev')
-rwxr-xr-x | scripts-dev/build_debian_packages | 154 |
1 file changed, 154 insertions, 0 deletions
diff --git a/scripts-dev/build_debian_packages b/scripts-dev/build_debian_packages
new file mode 100755
index 0000000000..577d93e6f6
--- /dev/null
+++ b/scripts-dev/build_debian_packages
@@ -0,0 +1,154 @@
+#!/usr/bin/env python3
+
+# Build the Debian packages using Docker images.
+#
+# This script builds the Docker images and then executes them sequentially, each
+# one building a Debian package for the targeted operating system. It is
+# designed to be a "single command" to produce all the images.
+#
+# By default, builds for all known distributions, but a list of distributions
+# can be passed on the commandline for debugging.
+
+import argparse
+from concurrent.futures import ThreadPoolExecutor
+import os
+import signal
+import subprocess
+import sys
+import threading
+
+DISTS = (
+    "debian:stretch",
+    "debian:buster",
+    "debian:sid",
+    "ubuntu:xenial",
+    "ubuntu:bionic",
+    "ubuntu:cosmic",
+)
+
+DESC = '''\
+Builds .debs for synapse, using a Docker image for the build environment.
+
+By default, builds for all known distributions, but a list of distributions
+can be passed on the commandline for debugging.
+'''
+
+
+class Builder(object):
+    def __init__(self, redirect_stdout=False):
+        self.redirect_stdout = redirect_stdout
+        self.active_containers = set()
+        self._lock = threading.Lock()
+        self._failed = False
+
+    def run_build(self, dist):
+        """Build deb for a single distribution"""
+
+        if self._failed:
+            print("not building %s due to earlier failure" % (dist, ))
+            raise Exception("failed")
+
+        try:
+            self._inner_build(dist)
+        except Exception as e:
+            print("build of %s failed: %s" % (dist, e), file=sys.stderr)
+            self._failed = True
+            raise
+
+    def _inner_build(self, dist):
+        projdir = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
+        os.chdir(projdir)
+
+        tag = dist.split(":", 1)[1]
+
+        # Make the dir where the debs will live.
+        #
+        # Note that we deliberately put this outside the source tree, otherwise
+        # we tend to get source packages which are full of debs. (We could hack
+        # around that with more magic in the build_debian.sh script, but that
+        # doesn't solve the problem for natively-run dpkg-buildpackage).
+        debsdir = os.path.join(projdir, '../debs')
+        os.makedirs(debsdir, exist_ok=True)
+
+        if self.redirect_stdout:
+            logfile = os.path.join(debsdir, "%s.buildlog" % (tag, ))
+            print("building %s: directing output to %s" % (dist, logfile))
+            stdout = open(logfile, "w")
+        else:
+            stdout = None
+
+        # first build a docker image for the build environment
+        subprocess.check_call([
+            "docker", "build",
+            "--tag", "dh-venv-builder:" + tag,
+            "--build-arg", "distro=" + dist,
+            "-f", "docker/Dockerfile-dhvirtualenv",
+            "docker",
+        ], stdout=stdout, stderr=subprocess.STDOUT)
+
+        container_name = "synapse_build_" + tag
+        with self._lock:
+            self.active_containers.add(container_name)
+
+        # then run the build itself
+        subprocess.check_call([
+            "docker", "run",
+            "--rm",
+            "--name", container_name,
+            "--volume=" + projdir + ":/synapse/source:ro",
+            "--volume=" + debsdir + ":/debs",
+            "-e", "TARGET_USERID=%i" % (os.getuid(), ),
+            "-e", "TARGET_GROUPID=%i" % (os.getgid(), ),
+            "dh-venv-builder:" + tag,
+        ], stdout=stdout, stderr=subprocess.STDOUT)
+
+        with self._lock:
+            self.active_containers.remove(container_name)
+
+        if stdout is not None:
+            stdout.close()
+            print("Completed build of %s" % (dist, ))
+
+    def kill_containers(self):
+        with self._lock:
+            active = list(self.active_containers)
+
+        for c in active:
+            print("killing container %s" % (c,))
+            subprocess.run([
+                "docker", "kill", c,
+            ], stdout=subprocess.DEVNULL)
+            with self._lock:
+                self.active_containers.remove(c)
+
+
+def run_builds(dists, jobs=1):
+    builder = Builder(redirect_stdout=(jobs > 1))
+
+    def sig(signum, _frame):
+        print("Caught SIGINT")
+        builder.kill_containers()
+    signal.signal(signal.SIGINT, sig)
+
+    with ThreadPoolExecutor(max_workers=jobs) as e:
+        res = e.map(builder.run_build, dists)
+
+        # make sure we consume the iterable so that exceptions are raised.
+        for r in res:
+            pass
+
+
+if __name__ == '__main__':
+    parser = argparse.ArgumentParser(
+        description=DESC,
+    )
+    parser.add_argument(
+        '-j', '--jobs', type=int, default=1,
+        help='specify the number of builds to run in parallel',
+    )
+    parser.add_argument(
+        'dist', nargs='*', default=DISTS,
+        help='a list of distributions to build for. Default: %(default)s',
+    )
+    args = parser.parse_args()
+    run_builds(dists=args.dist, jobs=args.jobs)
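
A note on the ThreadPoolExecutor pattern used in run_builds above: Executor.map returns a lazy iterator, and an exception raised inside run_build is only re-raised in the calling thread when the corresponding result is consumed, which is why the script loops over res even though it ignores the values. A minimal standalone sketch of that behaviour (the work function and its inputs are illustrative only, not part of the script):

    from concurrent.futures import ThreadPoolExecutor

    def work(item):
        # One input deliberately fails, to show where the error surfaces.
        if item == "bad":
            raise RuntimeError("build failed for %s" % (item,))
        return item.upper()

    with ThreadPoolExecutor(max_workers=2) as executor:
        results = executor.map(work, ["ok", "bad", "also-ok"])
        # Skipping this loop would leave the RuntimeError buried in its
        # future; iterating over the results re-raises it here.
        for result in results:
            print(result)

For everyday use the script is simply run from a synapse checkout, for example ./scripts-dev/build_debian_packages -j 2 debian:stretch ubuntu:bionic; with no distributions given it builds every entry in DISTS.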