diff --git a/scripts-dev/build_debian_packages b/scripts-dev/build_debian_packages
index d0685c8b35..3bb6e2c7ea 100755
--- a/scripts-dev/build_debian_packages
+++ b/scripts-dev/build_debian_packages
@@ -18,11 +18,9 @@ import threading
from concurrent.futures import ThreadPoolExecutor
DISTS = (
- "debian:stretch",
"debian:buster",
"debian:bullseye",
"debian:sid",
- "ubuntu:xenial",
"ubuntu:bionic",
"ubuntu:focal",
"ubuntu:groovy",
@@ -43,7 +41,7 @@ class Builder(object):
self._lock = threading.Lock()
self._failed = False
- def run_build(self, dist):
+ def run_build(self, dist, skip_tests=False):
"""Build deb for a single distribution"""
if self._failed:
@@ -51,13 +49,13 @@ class Builder(object):
raise Exception("failed")
try:
- self._inner_build(dist)
+ self._inner_build(dist, skip_tests)
except Exception as e:
print("build of %s failed: %s" % (dist, e), file=sys.stderr)
self._failed = True
raise
- def _inner_build(self, dist):
+ def _inner_build(self, dist, skip_tests=False):
projdir = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
os.chdir(projdir)
@@ -101,6 +99,8 @@ class Builder(object):
"--volume=" + debsdir + ":/debs",
"-e", "TARGET_USERID=%i" % (os.getuid(), ),
"-e", "TARGET_GROUPID=%i" % (os.getgid(), ),
+ "-e", "DEB_BUILD_OPTIONS=%s" % ("nocheck" if skip_tests else ""),
"dh-venv-builder:" + tag,
], stdout=stdout, stderr=subprocess.STDOUT)
@@ -124,7 +123,7 @@ class Builder(object):
self.active_containers.remove(c)
-def run_builds(dists, jobs=1):
+def run_builds(dists, jobs=1, skip_tests=False):
builder = Builder(redirect_stdout=(jobs > 1))
def sig(signum, _frame):
@@ -133,7 +132,8 @@ def run_builds(dists, jobs=1):
signal.signal(signal.SIGINT, sig)
with ThreadPoolExecutor(max_workers=jobs) as e:
- res = e.map(builder.run_build, dists)
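+        # Executor.map passes only the dist argument, so bind skip_tests via a lambda.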
+ res = e.map(lambda dist: builder.run_build(dist, skip_tests), dists)
# make sure we consume the iterable so that exceptions are raised.
for r in res:
@@ -149,8 +148,12 @@ if __name__ == '__main__':
help='specify the number of builds to run in parallel',
)
parser.add_argument(
+ '--no-check', action='store_true',
+ help='skip running tests after building',
+ )
+ parser.add_argument(
'dist', nargs='*', default=DISTS,
help='a list of distributions to build for. Default: %(default)s',
)
args = parser.parse_args()
- run_builds(dists=args.dist, jobs=args.jobs)
+ run_builds(dists=args.dist, jobs=args.jobs, skip_tests=args.no_check)
diff --git a/scripts-dev/complement.sh b/scripts-dev/complement.sh
index 3cde53f5c0..1612ab522c 100755
--- a/scripts-dev/complement.sh
+++ b/scripts-dev/complement.sh
@@ -1,22 +1,53 @@
-#! /bin/bash -eu
+#!/usr/bin/env bash
# This script is designed for developers who want to test their code
# against Complement.
#
# It makes a Synapse image which represents the current checkout,
-# then downloads Complement and runs it with that image.
+# builds a complement-synapse image on top, then runs tests with it.
+#
+# By default the script will fetch the latest Complement master branch and
+# run tests with that. This can be overridden to use a custom Complement
+# checkout by setting the COMPLEMENT_DIR environment variable to the
+# filepath of a local Complement checkout.
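+# For example: COMPLEMENT_DIR=../complement ./complement.sh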
+#
+# A regular expression of test method names can be supplied as the first
+# argument to the script. Complement will then only run those tests. If
+# no regex is supplied, all tests are run. For example:
+#
+# ./complement.sh "TestOutboundFederation(Profile|Send)"
+#
+
+# Exit if a command returns a non-zero exit code
+set -e
+# Change to the repository root
cd "$(dirname $0)/.."
+# Check for a user-specified Complement checkout
+if [[ -z "$COMPLEMENT_DIR" ]]; then
+ echo "COMPLEMENT_DIR not set. Fetching the latest Complement checkout..."
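+  # -N skips the download if the local copy is already up to date; -q silences output.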
+ wget -Nq https://github.com/matrix-org/complement/archive/master.tar.gz
+ tar -xzf master.tar.gz
+ COMPLEMENT_DIR=complement-master
+ echo "Checkout available at 'complement-master'"
+fi
+
# Build the base Synapse image from the local checkout
-docker build -t matrixdotorg/synapse:latest -f docker/Dockerfile .
+docker build -t matrixdotorg/synapse -f docker/Dockerfile .
+# Build the Synapse monolith image from Complement, based on the Synapse image we just built
+docker build -t complement-synapse -f "$COMPLEMENT_DIR/dockerfiles/Synapse.Dockerfile" "$COMPLEMENT_DIR/dockerfiles"
-# Download Complement
-wget -N https://github.com/matrix-org/complement/archive/master.tar.gz
-tar -xzf master.tar.gz
-cd complement-master
+cd "$COMPLEMENT_DIR"
-# Build the Synapse image from Complement, based on the above image we just built
-docker build -t complement-synapse -f dockerfiles/Synapse.Dockerfile ./dockerfiles
+EXTRA_COMPLEMENT_ARGS=""
+if [[ -n "$1" ]]; then
+ # A test name regex has been set, supply it to Complement
+ EXTRA_COMPLEMENT_ARGS+="-run $1 "
+fi
-# Run the tests on the resulting image!
-COMPLEMENT_BASE_IMAGE=complement-synapse go test -v -count=1 ./tests
+# Run the tests!
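+# (synapse_blacklist skips tests that Synapse is known to fail, while the
+# msc2946 and msc3083 tags opt in to tests for those unstable MSCs.)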
+COMPLEMENT_BASE_IMAGE=complement-synapse go test -v -tags synapse_blacklist,msc2946,msc3083 -count=1 $EXTRA_COMPLEMENT_ARGS ./tests
diff --git a/scripts-dev/release.py b/scripts-dev/release.py
new file mode 100755
index 0000000000..1042fa48bc
--- /dev/null
+++ b/scripts-dev/release.py
@@ -0,0 +1,252 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# Copyright 2020 The Matrix.org Foundation C.I.C.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""An interactive script for doing a release. See `run()` below.
+"""
+
+import subprocess
+import sys
+from typing import Optional
+
+import click
+import git
+from packaging import version
+from redbaron import RedBaron
+
+
+@click.command()
+def run():
+ """An interactive script to walk through the initial stages of creating a
+    release, including creating the release branch, updating the changelog and pushing to
+ GitHub.
+
+    Requires the dev dependencies to be installed, which can be done via:
+
+ pip install -e .[dev]
+
+ """
+
+ # Make sure we're in a git repo.
+ try:
+ repo = git.Repo()
+ except git.InvalidGitRepositoryError:
+ raise click.ClickException("Not in Synapse repo.")
+
+ if repo.is_dirty():
+ raise click.ClickException("Uncommitted changes exist.")
+
+ click.secho("Updating git repo...")
+ repo.remote().fetch()
+
+ # Parse the AST and load the `__version__` node so that we can edit it
+ # later.
+ with open("synapse/__init__.py") as f:
+ red = RedBaron(f.read())
+
+ version_node = None
+ for node in red:
+ if node.type != "assignment":
+ continue
+
+ if node.target.type != "name":
+ continue
+
+ if node.target.value != "__version__":
+ continue
+
+ version_node = node
+ break
+
+ if not version_node:
+ print("Failed to find '__version__' definition in synapse/__init__.py")
+ sys.exit(1)
+
+ # Parse the current version.
+ current_version = version.parse(version_node.value.value.strip('"'))
+ assert isinstance(current_version, version.Version)
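+    # packaging parses e.g. "1.32.0rc1" with .pre == ("rc", 1); a final release has .pre == None.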
+
+    # Figure out what sort of release we're doing and calculate the new version.
+ rc = click.confirm("RC", default=True)
+ if current_version.pre:
+ # If the current version is an RC we don't need to bump any of the
+ # version numbers (other than the RC number).
+ base_version = "{}.{}.{}".format(
+ current_version.major,
+ current_version.minor,
+ current_version.micro,
+ )
+
+ if rc:
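+            # Bump only the RC number, e.g. 1.32.0rc1 -> 1.32.0rc2.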
+ new_version = "{}.{}.{}rc{}".format(
+ current_version.major,
+ current_version.minor,
+ current_version.micro,
+ current_version.pre[1] + 1,
+ )
+ else:
+ new_version = base_version
+ else:
+        # If this is a new release cycle then we need to know if it's a major
+ # version bump or a hotfix.
+ release_type = click.prompt(
+ "Release type",
+ type=click.Choice(("major", "hotfix")),
+ show_choices=True,
+ default="major",
+ )
+
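+        # "major" bumps the minor version (e.g. 1.32.1 -> 1.33.0), while
+        # "hotfix" bumps the micro version (e.g. 1.32.0 -> 1.32.1).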
+ if release_type == "major":
+ base_version = new_version = "{}.{}.{}".format(
+ current_version.major,
+ current_version.minor + 1,
+ 0,
+ )
+ if rc:
+ new_version = "{}.{}.{}rc1".format(
+ current_version.major,
+ current_version.minor + 1,
+ 0,
+ )
+
+ else:
+ base_version = new_version = "{}.{}.{}".format(
+ current_version.major,
+ current_version.minor,
+ current_version.micro + 1,
+ )
+ if rc:
+ new_version = "{}.{}.{}rc1".format(
+ current_version.major,
+ current_version.minor,
+ current_version.micro + 1,
+ )
+
+ # Confirm the calculated version is OK.
+ if not click.confirm(f"Create new version: {new_version}?", default=True):
+ click.get_current_context().abort()
+
+ # Switch to the release branch.
+ release_branch_name = f"release-v{base_version}"
+ release_branch = find_ref(repo, release_branch_name)
+ if release_branch:
+ if release_branch.is_remote():
+ # If the release branch only exists on the remote we check it out
+ # locally.
+ repo.git.checkout(release_branch_name)
+ release_branch = repo.active_branch
+ else:
+        # If a branch doesn't exist we create one. We ask which branch it
+        # should be based on, defaulting to sensible values depending on the
+ # release type.
+ if current_version.is_prerelease:
+ default = release_branch_name
+ elif release_type == "major":
+ default = "develop"
+ else:
+ default = "master"
+
+ branch_name = click.prompt(
+ "Which branch should the release be based on?", default=default
+ )
+
+ base_branch = find_ref(repo, branch_name)
+ if not base_branch:
+ print(f"Could not find base branch {branch_name}!")
+ click.get_current_context().abort()
+
+ # Check out the base branch and ensure it's up to date
+ repo.head.reference = base_branch
+ repo.head.reset(index=True, working_tree=True)
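+        # (Reassigning head.reference and resetting is GitPython's equivalent of a checkout.)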
+ if not base_branch.is_remote():
+ update_branch(repo)
+
+ # Create the new release branch
+ release_branch = repo.create_head(release_branch_name, commit=base_branch)
+
+    # Switch to the release branch and ensure it's up to date.
+ repo.git.checkout(release_branch_name)
+ update_branch(repo)
+
+ # Update the `__version__` variable and write it back to the file.
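+    # (RedBaron preserves the rest of the file's formatting when dumped back out.)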
+ version_node.value = '"' + new_version + '"'
+ with open("synapse/__init__.py", "w") as f:
+ f.write(red.dumps())
+
+ # Generate changelogs
+ subprocess.run("python3 -m towncrier", shell=True)
+
+    # Generate Debian changelogs if it's not an RC.
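+    # (-M takes the maintainer from debian/control; -r -D stable finalises the
+    # changelog entry for the "stable" distribution.)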
+ if not rc:
+ subprocess.run(
+ f'dch -M -v {new_version} "New synapse release {new_version}."', shell=True
+ )
+ subprocess.run('dch -M -r -D stable ""', shell=True)
+
+    # Show the user the changes and ask if they want to edit the changelog.
+ repo.git.add("-u")
+ subprocess.run("git diff --cached", shell=True)
+
+ if click.confirm("Edit changelog?", default=False):
+ click.edit(filename="CHANGES.md")
+
+ # Commit the changes.
+ repo.git.add("-u")
+ repo.git.commit(f"-m {new_version}")
+
+ # We give the option to bail here in case the user wants to make sure things
+ # are OK before pushing.
+    if not click.confirm("Push branch to GitHub?", default=True):
+ print("")
+ print("Run when ready to push:")
+ print("")
+ print(f"\tgit push -u {repo.remote().name} {repo.active_branch.name}")
+ print("")
+ sys.exit(0)
+
+ # Otherwise, push and open the changelog in the browser.
+ repo.git.push("-u", repo.remote().name, repo.active_branch.name)
+
+ click.launch(
+ f"https://github.com/matrix-org/synapse/blob/{repo.active_branch.name}/CHANGES.md"
+ )
+
+
+def find_ref(repo: git.Repo, ref_name: str) -> Optional[git.HEAD]:
+ """Find the branch/ref, looking first locally then in the remote."""
+ if ref_name in repo.refs:
+ return repo.refs[ref_name]
+ elif ref_name in repo.remote().refs:
+ return repo.remote().refs[ref_name]
+ else:
+ return None
+
+
+def update_branch(repo: git.Repo):
+    """Ensure branch is up to date if it has a remote tracking branch"""
+ if repo.active_branch.tracking_branch():
+ repo.git.merge(repo.active_branch.tracking_branch().name)
+
+
+if __name__ == "__main__":
+ run()