From 6d282a9c89ae9fb55fe7ccc8d0ab16bf18b206ec Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Wed, 2 Mar 2022 14:28:18 +0000 Subject: Make release script write correct no-op changelog (#12127) As we want to include the previous version in the "No new changes..." string. --- scripts-dev/release.py | 30 ++++++++++++++++++++++++++++-- 1 file changed, 28 insertions(+), 2 deletions(-) (limited to 'scripts-dev') diff --git a/scripts-dev/release.py b/scripts-dev/release.py index 4e1f99fee4..046453e65f 100755 --- a/scripts-dev/release.py +++ b/scripts-dev/release.py @@ -17,6 +17,8 @@ """An interactive script for doing a release. See `cli()` below. """ +import glob +import os import re import subprocess import sys @@ -209,8 +211,8 @@ def prepare(): with open("synapse/__init__.py", "w") as f: f.write(parsed_synapse_ast.dumps()) - # Generate changelogs - run_until_successful("python3 -m towncrier", shell=True) + # Generate changelogs. + generate_and_write_changelog(current_version) # Generate debian changelogs if parsed_new_version.pre is not None: @@ -523,5 +525,29 @@ def get_changes_for_version(wanted_version: version.Version) -> str: return "\n".join(version_changelog) +def generate_and_write_changelog(current_version: version.Version): + # We do this by getting a draft so that we can edit it before writing to the + # changelog. + result = run_until_successful( + "python3 -m towncrier --draft", shell=True, capture_output=True + ) + new_changes = result.stdout.decode("utf-8") + new_changes = new_changes.replace( + "No significant changes.", f"No significant changes since {current_version}." + ) + + # Prepend changes to changelog + with open("CHANGES.md", "r+") as f: + existing_content = f.read() + f.seek(0, 0) + f.write(new_changes) + f.write("\n") + f.write(existing_content) + + # Remove all the news fragments + for f in glob.iglob("changelog.d/*.*"): + os.remove(f) + + if __name__ == "__main__": cli() -- cgit 1.5.1 From 1fbe0316a991e77289d4577b16ff3fcd27c26dc8 Mon Sep 17 00:00:00 2001 From: David Robertson Date: Wed, 2 Mar 2022 18:00:26 +0000 Subject: Add suffices to scripts in scripts-dev (#12137) * Rename scripts-dev to have suffices * Update references to `scripts-dev` * Changelog * These scripts don't pass mypy --- .github/workflows/release-artifacts.yml | 4 +- .github/workflows/tests.yml | 4 +- changelog.d/12137.misc | 1 + docs/code_style.md | 2 +- mypy.ini | 12 +- scripts-dev/build_debian_packages | 217 -------------------------------- scripts-dev/build_debian_packages.py | 217 ++++++++++++++++++++++++++++++++ scripts-dev/check-newsfragment | 62 --------- scripts-dev/check-newsfragment.sh | 62 +++++++++ scripts-dev/generate_sample_config | 28 ----- scripts-dev/generate_sample_config.sh | 28 +++++ scripts-dev/lint.sh | 2 - scripts-dev/sign_json | 166 ------------------------ scripts-dev/sign_json.py | 166 ++++++++++++++++++++++++ tox.ini | 2 - 15 files changed, 490 insertions(+), 483 deletions(-) create mode 100644 changelog.d/12137.misc delete mode 100755 scripts-dev/build_debian_packages create mode 100755 scripts-dev/build_debian_packages.py delete mode 100755 scripts-dev/check-newsfragment create mode 100755 scripts-dev/check-newsfragment.sh delete mode 100755 scripts-dev/generate_sample_config create mode 100755 scripts-dev/generate_sample_config.sh delete mode 100755 scripts-dev/sign_json create mode 100755 scripts-dev/sign_json.py (limited to 'scripts-dev') diff --git a/.github/workflows/release-artifacts.yml b/.github/workflows/release-artifacts.yml index eee3633d50..65ea761ad7 
100644 --- a/.github/workflows/release-artifacts.yml +++ b/.github/workflows/release-artifacts.yml @@ -31,7 +31,7 @@ jobs: # if we're running from a tag, get the full list of distros; otherwise just use debian:sid dists='["debian:sid"]' if [[ $GITHUB_REF == refs/tags/* ]]; then - dists=$(scripts-dev/build_debian_packages --show-dists-json) + dists=$(scripts-dev/build_debian_packages.py --show-dists-json) fi echo "::set-output name=distros::$dists" # map the step outputs to job outputs @@ -74,7 +74,7 @@ jobs: # see https://github.com/docker/build-push-action/issues/252 # for the cache magic here run: | - ./src/scripts-dev/build_debian_packages \ + ./src/scripts-dev/build_debian_packages.py \ --docker-build-arg=--cache-from=type=local,src=/tmp/.buildx-cache \ --docker-build-arg=--cache-to=type=local,mode=max,dest=/tmp/.buildx-cache-new \ --docker-build-arg=--progress=plain \ diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index e9e4277322..3f4e44ca59 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -16,7 +16,7 @@ jobs: - uses: actions/checkout@v2 - uses: actions/setup-python@v2 - run: pip install -e . - - run: scripts-dev/generate_sample_config --check + - run: scripts-dev/generate_sample_config.sh --check lint: runs-on: ubuntu-latest @@ -51,7 +51,7 @@ jobs: fetch-depth: 0 - uses: actions/setup-python@v2 - run: "pip install 'towncrier>=18.6.0rc1'" - - run: scripts-dev/check-newsfragment + - run: scripts-dev/check-newsfragment.sh env: PULL_REQUEST_NUMBER: ${{ github.event.number }} diff --git a/changelog.d/12137.misc b/changelog.d/12137.misc new file mode 100644 index 0000000000..118ff77a91 --- /dev/null +++ b/changelog.d/12137.misc @@ -0,0 +1 @@ +Give `scripts-dev` scripts suffixes for neater CI config. \ No newline at end of file diff --git a/docs/code_style.md b/docs/code_style.md index 4d8e7c973d..e7c9cd1a5e 100644 --- a/docs/code_style.md +++ b/docs/code_style.md @@ -172,6 +172,6 @@ frobber: ``` Note that the sample configuration is generated from the synapse code -and is maintained by a script, `scripts-dev/generate_sample_config`. +and is maintained by a script, `scripts-dev/generate_sample_config.sh`. Making sure that the output from this script matches the desired format is left as an exercise for the reader! diff --git a/mypy.ini b/mypy.ini index 23ca4eaa5a..10971b7225 100644 --- a/mypy.ini +++ b/mypy.ini @@ -11,7 +11,7 @@ local_partial_types = True no_implicit_optional = True files = - scripts-dev/sign_json, + scripts-dev/, setup.py, synapse/, tests/ @@ -23,10 +23,20 @@ files = # https://docs.python.org/3/library/re.html#re.X exclude = (?x) ^( + |scripts-dev/build_debian_packages.py + |scripts-dev/check_signature.py + |scripts-dev/definitions.py + |scripts-dev/federation_client.py + |scripts-dev/hash_history.py + |scripts-dev/list_url_patterns.py + |scripts-dev/release.py + |scripts-dev/tail-synapse.py + |synapse/_scripts/export_signing_key.py |synapse/_scripts/move_remote_media_to_new_store.py |synapse/_scripts/synapse_port_db.py |synapse/_scripts/update_synapse_database.py + |synapse/storage/databases/__init__.py |synapse/storage/databases/main/__init__.py |synapse/storage/databases/main/cache.py diff --git a/scripts-dev/build_debian_packages b/scripts-dev/build_debian_packages deleted file mode 100755 index 7ff96a1ee6..0000000000 --- a/scripts-dev/build_debian_packages +++ /dev/null @@ -1,217 +0,0 @@ -#!/usr/bin/env python3 - -# Build the Debian packages using Docker images. 
-# -# This script builds the Docker images and then executes them sequentially, each -# one building a Debian package for the targeted operating system. It is -# designed to be a "single command" to produce all the images. -# -# By default, builds for all known distributions, but a list of distributions -# can be passed on the commandline for debugging. - -import argparse -import json -import os -import signal -import subprocess -import sys -import threading -from concurrent.futures import ThreadPoolExecutor -from typing import Optional, Sequence - -DISTS = ( - "debian:buster", # oldstable: EOL 2022-08 - "debian:bullseye", - "debian:bookworm", - "debian:sid", - "ubuntu:focal", # 20.04 LTS (our EOL forced by Py38 on 2024-10-14) - "ubuntu:impish", # 21.10 (EOL 2022-07) -) - -DESC = """\ -Builds .debs for synapse, using a Docker image for the build environment. - -By default, builds for all known distributions, but a list of distributions -can be passed on the commandline for debugging. -""" - -projdir = os.path.dirname(os.path.dirname(os.path.realpath(__file__))) - - -class Builder(object): - def __init__( - self, redirect_stdout=False, docker_build_args: Optional[Sequence[str]] = None - ): - self.redirect_stdout = redirect_stdout - self._docker_build_args = tuple(docker_build_args or ()) - self.active_containers = set() - self._lock = threading.Lock() - self._failed = False - - def run_build(self, dist, skip_tests=False): - """Build deb for a single distribution""" - - if self._failed: - print("not building %s due to earlier failure" % (dist,)) - raise Exception("failed") - - try: - self._inner_build(dist, skip_tests) - except Exception as e: - print("build of %s failed: %s" % (dist, e), file=sys.stderr) - self._failed = True - raise - - def _inner_build(self, dist, skip_tests=False): - tag = dist.split(":", 1)[1] - - # Make the dir where the debs will live. - # - # Note that we deliberately put this outside the source tree, otherwise - # we tend to get source packages which are full of debs. (We could hack - # around that with more magic in the build_debian.sh script, but that - # doesn't solve the problem for natively-run dpkg-buildpakage). 
- debsdir = os.path.join(projdir, "../debs") - os.makedirs(debsdir, exist_ok=True) - - if self.redirect_stdout: - logfile = os.path.join(debsdir, "%s.buildlog" % (tag,)) - print("building %s: directing output to %s" % (dist, logfile)) - stdout = open(logfile, "w") - else: - stdout = None - - # first build a docker image for the build environment - build_args = ( - ( - "docker", - "build", - "--tag", - "dh-venv-builder:" + tag, - "--build-arg", - "distro=" + dist, - "-f", - "docker/Dockerfile-dhvirtualenv", - ) - + self._docker_build_args - + ("docker",) - ) - - subprocess.check_call( - build_args, - stdout=stdout, - stderr=subprocess.STDOUT, - cwd=projdir, - ) - - container_name = "synapse_build_" + tag - with self._lock: - self.active_containers.add(container_name) - - # then run the build itself - subprocess.check_call( - [ - "docker", - "run", - "--rm", - "--name", - container_name, - "--volume=" + projdir + ":/synapse/source:ro", - "--volume=" + debsdir + ":/debs", - "-e", - "TARGET_USERID=%i" % (os.getuid(),), - "-e", - "TARGET_GROUPID=%i" % (os.getgid(),), - "-e", - "DEB_BUILD_OPTIONS=%s" % ("nocheck" if skip_tests else ""), - "dh-venv-builder:" + tag, - ], - stdout=stdout, - stderr=subprocess.STDOUT, - ) - - with self._lock: - self.active_containers.remove(container_name) - - if stdout is not None: - stdout.close() - print("Completed build of %s" % (dist,)) - - def kill_containers(self): - with self._lock: - active = list(self.active_containers) - - for c in active: - print("killing container %s" % (c,)) - subprocess.run( - [ - "docker", - "kill", - c, - ], - stdout=subprocess.DEVNULL, - ) - with self._lock: - self.active_containers.remove(c) - - -def run_builds(builder, dists, jobs=1, skip_tests=False): - def sig(signum, _frame): - print("Caught SIGINT") - builder.kill_containers() - - signal.signal(signal.SIGINT, sig) - - with ThreadPoolExecutor(max_workers=jobs) as e: - res = e.map(lambda dist: builder.run_build(dist, skip_tests), dists) - - # make sure we consume the iterable so that exceptions are raised. - for _ in res: - pass - - -if __name__ == "__main__": - parser = argparse.ArgumentParser( - description=DESC, - ) - parser.add_argument( - "-j", - "--jobs", - type=int, - default=1, - help="specify the number of builds to run in parallel", - ) - parser.add_argument( - "--no-check", - action="store_true", - help="skip running tests after building", - ) - parser.add_argument( - "--docker-build-arg", - action="append", - help="specify an argument to pass to docker build", - ) - parser.add_argument( - "--show-dists-json", - action="store_true", - help="instead of building the packages, just list the dists to build for, as a json array", - ) - parser.add_argument( - "dist", - nargs="*", - default=DISTS, - help="a list of distributions to build for. Default: %(default)s", - ) - args = parser.parse_args() - if args.show_dists_json: - print(json.dumps(DISTS)) - else: - builder = Builder( - redirect_stdout=(args.jobs > 1), docker_build_args=args.docker_build_arg - ) - run_builds( - builder, - dists=args.dist, - jobs=args.jobs, - skip_tests=args.no_check, - ) diff --git a/scripts-dev/build_debian_packages.py b/scripts-dev/build_debian_packages.py new file mode 100755 index 0000000000..7ff96a1ee6 --- /dev/null +++ b/scripts-dev/build_debian_packages.py @@ -0,0 +1,217 @@ +#!/usr/bin/env python3 + +# Build the Debian packages using Docker images. 
+# +# This script builds the Docker images and then executes them sequentially, each +# one building a Debian package for the targeted operating system. It is +# designed to be a "single command" to produce all the images. +# +# By default, builds for all known distributions, but a list of distributions +# can be passed on the commandline for debugging. + +import argparse +import json +import os +import signal +import subprocess +import sys +import threading +from concurrent.futures import ThreadPoolExecutor +from typing import Optional, Sequence + +DISTS = ( + "debian:buster", # oldstable: EOL 2022-08 + "debian:bullseye", + "debian:bookworm", + "debian:sid", + "ubuntu:focal", # 20.04 LTS (our EOL forced by Py38 on 2024-10-14) + "ubuntu:impish", # 21.10 (EOL 2022-07) +) + +DESC = """\ +Builds .debs for synapse, using a Docker image for the build environment. + +By default, builds for all known distributions, but a list of distributions +can be passed on the commandline for debugging. +""" + +projdir = os.path.dirname(os.path.dirname(os.path.realpath(__file__))) + + +class Builder(object): + def __init__( + self, redirect_stdout=False, docker_build_args: Optional[Sequence[str]] = None + ): + self.redirect_stdout = redirect_stdout + self._docker_build_args = tuple(docker_build_args or ()) + self.active_containers = set() + self._lock = threading.Lock() + self._failed = False + + def run_build(self, dist, skip_tests=False): + """Build deb for a single distribution""" + + if self._failed: + print("not building %s due to earlier failure" % (dist,)) + raise Exception("failed") + + try: + self._inner_build(dist, skip_tests) + except Exception as e: + print("build of %s failed: %s" % (dist, e), file=sys.stderr) + self._failed = True + raise + + def _inner_build(self, dist, skip_tests=False): + tag = dist.split(":", 1)[1] + + # Make the dir where the debs will live. + # + # Note that we deliberately put this outside the source tree, otherwise + # we tend to get source packages which are full of debs. (We could hack + # around that with more magic in the build_debian.sh script, but that + # doesn't solve the problem for natively-run dpkg-buildpakage). 
+ debsdir = os.path.join(projdir, "../debs") + os.makedirs(debsdir, exist_ok=True) + + if self.redirect_stdout: + logfile = os.path.join(debsdir, "%s.buildlog" % (tag,)) + print("building %s: directing output to %s" % (dist, logfile)) + stdout = open(logfile, "w") + else: + stdout = None + + # first build a docker image for the build environment + build_args = ( + ( + "docker", + "build", + "--tag", + "dh-venv-builder:" + tag, + "--build-arg", + "distro=" + dist, + "-f", + "docker/Dockerfile-dhvirtualenv", + ) + + self._docker_build_args + + ("docker",) + ) + + subprocess.check_call( + build_args, + stdout=stdout, + stderr=subprocess.STDOUT, + cwd=projdir, + ) + + container_name = "synapse_build_" + tag + with self._lock: + self.active_containers.add(container_name) + + # then run the build itself + subprocess.check_call( + [ + "docker", + "run", + "--rm", + "--name", + container_name, + "--volume=" + projdir + ":/synapse/source:ro", + "--volume=" + debsdir + ":/debs", + "-e", + "TARGET_USERID=%i" % (os.getuid(),), + "-e", + "TARGET_GROUPID=%i" % (os.getgid(),), + "-e", + "DEB_BUILD_OPTIONS=%s" % ("nocheck" if skip_tests else ""), + "dh-venv-builder:" + tag, + ], + stdout=stdout, + stderr=subprocess.STDOUT, + ) + + with self._lock: + self.active_containers.remove(container_name) + + if stdout is not None: + stdout.close() + print("Completed build of %s" % (dist,)) + + def kill_containers(self): + with self._lock: + active = list(self.active_containers) + + for c in active: + print("killing container %s" % (c,)) + subprocess.run( + [ + "docker", + "kill", + c, + ], + stdout=subprocess.DEVNULL, + ) + with self._lock: + self.active_containers.remove(c) + + +def run_builds(builder, dists, jobs=1, skip_tests=False): + def sig(signum, _frame): + print("Caught SIGINT") + builder.kill_containers() + + signal.signal(signal.SIGINT, sig) + + with ThreadPoolExecutor(max_workers=jobs) as e: + res = e.map(lambda dist: builder.run_build(dist, skip_tests), dists) + + # make sure we consume the iterable so that exceptions are raised. + for _ in res: + pass + + +if __name__ == "__main__": + parser = argparse.ArgumentParser( + description=DESC, + ) + parser.add_argument( + "-j", + "--jobs", + type=int, + default=1, + help="specify the number of builds to run in parallel", + ) + parser.add_argument( + "--no-check", + action="store_true", + help="skip running tests after building", + ) + parser.add_argument( + "--docker-build-arg", + action="append", + help="specify an argument to pass to docker build", + ) + parser.add_argument( + "--show-dists-json", + action="store_true", + help="instead of building the packages, just list the dists to build for, as a json array", + ) + parser.add_argument( + "dist", + nargs="*", + default=DISTS, + help="a list of distributions to build for. Default: %(default)s", + ) + args = parser.parse_args() + if args.show_dists_json: + print(json.dumps(DISTS)) + else: + builder = Builder( + redirect_stdout=(args.jobs > 1), docker_build_args=args.docker_build_arg + ) + run_builds( + builder, + dists=args.dist, + jobs=args.jobs, + skip_tests=args.no_check, + ) diff --git a/scripts-dev/check-newsfragment b/scripts-dev/check-newsfragment deleted file mode 100755 index 493558ad65..0000000000 --- a/scripts-dev/check-newsfragment +++ /dev/null @@ -1,62 +0,0 @@ -#!/usr/bin/env bash -# -# A script which checks that an appropriate news file has been added on this -# branch. 
- -echo -e "+++ \033[32mChecking newsfragment\033[m" - -set -e - -# make sure that origin/develop is up to date -git remote set-branches --add origin develop -git fetch -q origin develop - -pr="$PULL_REQUEST_NUMBER" - -# if there are changes in the debian directory, check that the debian changelog -# has been updated -if ! git diff --quiet FETCH_HEAD... -- debian; then - if git diff --quiet FETCH_HEAD... -- debian/changelog; then - echo "Updates to debian directory, but no update to the changelog." >&2 - echo "!! Please see the contributing guide for help writing your changelog entry:" >&2 - echo "https://github.com/matrix-org/synapse/blob/develop/CONTRIBUTING.md#debian-changelog" >&2 - exit 1 - fi -fi - -# if there are changes *outside* the debian directory, check that the -# newsfragments have been updated. -if ! git diff --name-only FETCH_HEAD... | grep -qv '^debian/'; then - exit 0 -fi - -# Print a link to the contributing guide if the user makes a mistake -CONTRIBUTING_GUIDE_TEXT="!! Please see the contributing guide for help writing your changelog entry: -https://github.com/matrix-org/synapse/blob/develop/CONTRIBUTING.md#changelog" - -# If check-newsfragment returns a non-zero exit code, print the contributing guide and exit -python -m towncrier.check --compare-with=origin/develop || (echo -e "$CONTRIBUTING_GUIDE_TEXT" >&2 && exit 1) - -echo -echo "--------------------------" -echo - -matched=0 -for f in $(git diff --diff-filter=d --name-only FETCH_HEAD... -- changelog.d); do - # check that any added newsfiles on this branch end with a full stop. - lastchar=$(tr -d '\n' < "$f" | tail -c 1) - if [ "$lastchar" != '.' ] && [ "$lastchar" != '!' ]; then - echo -e "\e[31mERROR: newsfragment $f does not end with a '.' or '!'\e[39m" >&2 - echo -e "$CONTRIBUTING_GUIDE_TEXT" >&2 - exit 1 - fi - - # see if this newsfile corresponds to the right PR - [[ -n "$pr" && "$f" == changelog.d/"$pr".* ]] && matched=1 -done - -if [[ -n "$pr" && "$matched" -eq 0 ]]; then - echo -e "\e[31mERROR: Did not find a news fragment with the right number: expected changelog.d/$pr.*.\e[39m" >&2 - echo -e "$CONTRIBUTING_GUIDE_TEXT" >&2 - exit 1 -fi diff --git a/scripts-dev/check-newsfragment.sh b/scripts-dev/check-newsfragment.sh new file mode 100755 index 0000000000..493558ad65 --- /dev/null +++ b/scripts-dev/check-newsfragment.sh @@ -0,0 +1,62 @@ +#!/usr/bin/env bash +# +# A script which checks that an appropriate news file has been added on this +# branch. + +echo -e "+++ \033[32mChecking newsfragment\033[m" + +set -e + +# make sure that origin/develop is up to date +git remote set-branches --add origin develop +git fetch -q origin develop + +pr="$PULL_REQUEST_NUMBER" + +# if there are changes in the debian directory, check that the debian changelog +# has been updated +if ! git diff --quiet FETCH_HEAD... -- debian; then + if git diff --quiet FETCH_HEAD... -- debian/changelog; then + echo "Updates to debian directory, but no update to the changelog." >&2 + echo "!! Please see the contributing guide for help writing your changelog entry:" >&2 + echo "https://github.com/matrix-org/synapse/blob/develop/CONTRIBUTING.md#debian-changelog" >&2 + exit 1 + fi +fi + +# if there are changes *outside* the debian directory, check that the +# newsfragments have been updated. +if ! git diff --name-only FETCH_HEAD... | grep -qv '^debian/'; then + exit 0 +fi + +# Print a link to the contributing guide if the user makes a mistake +CONTRIBUTING_GUIDE_TEXT="!! 
Please see the contributing guide for help writing your changelog entry: +https://github.com/matrix-org/synapse/blob/develop/CONTRIBUTING.md#changelog" + +# If check-newsfragment returns a non-zero exit code, print the contributing guide and exit +python -m towncrier.check --compare-with=origin/develop || (echo -e "$CONTRIBUTING_GUIDE_TEXT" >&2 && exit 1) + +echo +echo "--------------------------" +echo + +matched=0 +for f in $(git diff --diff-filter=d --name-only FETCH_HEAD... -- changelog.d); do + # check that any added newsfiles on this branch end with a full stop. + lastchar=$(tr -d '\n' < "$f" | tail -c 1) + if [ "$lastchar" != '.' ] && [ "$lastchar" != '!' ]; then + echo -e "\e[31mERROR: newsfragment $f does not end with a '.' or '!'\e[39m" >&2 + echo -e "$CONTRIBUTING_GUIDE_TEXT" >&2 + exit 1 + fi + + # see if this newsfile corresponds to the right PR + [[ -n "$pr" && "$f" == changelog.d/"$pr".* ]] && matched=1 +done + +if [[ -n "$pr" && "$matched" -eq 0 ]]; then + echo -e "\e[31mERROR: Did not find a news fragment with the right number: expected changelog.d/$pr.*.\e[39m" >&2 + echo -e "$CONTRIBUTING_GUIDE_TEXT" >&2 + exit 1 +fi diff --git a/scripts-dev/generate_sample_config b/scripts-dev/generate_sample_config deleted file mode 100755 index 185e277933..0000000000 --- a/scripts-dev/generate_sample_config +++ /dev/null @@ -1,28 +0,0 @@ -#!/usr/bin/env bash -# -# Update/check the docs/sample_config.yaml - -set -e - -cd "$(dirname "$0")/.." - -SAMPLE_CONFIG="docs/sample_config.yaml" -SAMPLE_LOG_CONFIG="docs/sample_log_config.yaml" - -check() { - diff -u "$SAMPLE_LOG_CONFIG" <(synapse/_scripts/generate_log_config.py) >/dev/null || return 1 -} - -if [ "$1" == "--check" ]; then - diff -u "$SAMPLE_CONFIG" <(synapse/_scripts/generate_config.py --header-file docs/.sample_config_header.yaml) >/dev/null || { - echo -e "\e[1m\e[31m$SAMPLE_CONFIG is not up-to-date. Regenerate it with \`scripts-dev/generate_sample_config\`.\e[0m" >&2 - exit 1 - } - diff -u "$SAMPLE_LOG_CONFIG" <(synapse/_scripts/generate_log_config.py) >/dev/null || { - echo -e "\e[1m\e[31m$SAMPLE_LOG_CONFIG is not up-to-date. Regenerate it with \`scripts-dev/generate_sample_config\`.\e[0m" >&2 - exit 1 - } -else - synapse/_scripts/generate_config.py --header-file docs/.sample_config_header.yaml -o "$SAMPLE_CONFIG" - synapse/_scripts/generate_log_config.py -o "$SAMPLE_LOG_CONFIG" -fi diff --git a/scripts-dev/generate_sample_config.sh b/scripts-dev/generate_sample_config.sh new file mode 100755 index 0000000000..375897eacb --- /dev/null +++ b/scripts-dev/generate_sample_config.sh @@ -0,0 +1,28 @@ +#!/usr/bin/env bash +# +# Update/check the docs/sample_config.yaml + +set -e + +cd "$(dirname "$0")/.." + +SAMPLE_CONFIG="docs/sample_config.yaml" +SAMPLE_LOG_CONFIG="docs/sample_log_config.yaml" + +check() { + diff -u "$SAMPLE_LOG_CONFIG" <(synapse/_scripts/generate_log_config.py) >/dev/null || return 1 +} + +if [ "$1" == "--check" ]; then + diff -u "$SAMPLE_CONFIG" <(synapse/_scripts/generate_config.py --header-file docs/.sample_config_header.yaml) >/dev/null || { + echo -e "\e[1m\e[31m$SAMPLE_CONFIG is not up-to-date. Regenerate it with \`scripts-dev/generate_sample_config.sh\`.\e[0m" >&2 + exit 1 + } + diff -u "$SAMPLE_LOG_CONFIG" <(synapse/_scripts/generate_log_config.py) >/dev/null || { + echo -e "\e[1m\e[31m$SAMPLE_LOG_CONFIG is not up-to-date. 
Regenerate it with \`scripts-dev/generate_sample_config.sh\`.\e[0m" >&2 + exit 1 + } +else + synapse/_scripts/generate_config.py --header-file docs/.sample_config_header.yaml -o "$SAMPLE_CONFIG" + synapse/_scripts/generate_log_config.py -o "$SAMPLE_LOG_CONFIG" +fi diff --git a/scripts-dev/lint.sh b/scripts-dev/lint.sh index df4d4934d0..2f5f2c3566 100755 --- a/scripts-dev/lint.sh +++ b/scripts-dev/lint.sh @@ -85,8 +85,6 @@ else "synapse" "docker" "tests" # annoyingly, black doesn't find these so we have to list them "scripts-dev" - "scripts-dev/build_debian_packages" - "scripts-dev/sign_json" "contrib" "synctl" "setup.py" "synmark" "stubs" ".ci" ) fi diff --git a/scripts-dev/sign_json b/scripts-dev/sign_json deleted file mode 100755 index 9459543106..0000000000 --- a/scripts-dev/sign_json +++ /dev/null @@ -1,166 +0,0 @@ -#!/usr/bin/env python -# -# Copyright 2020 The Matrix.org Foundation C.I.C. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -import argparse -import json -import sys -from json import JSONDecodeError - -import yaml -from signedjson.key import read_signing_keys -from signedjson.sign import sign_json - -from synapse.api.room_versions import KNOWN_ROOM_VERSIONS -from synapse.crypto.event_signing import add_hashes_and_signatures -from synapse.util import json_encoder - - -def main(): - parser = argparse.ArgumentParser( - description="""Adds a signature to a JSON object. - -Example usage: - - $ scripts-dev/sign_json.py -N test -k localhost.signing.key "{}" - {"signatures":{"test":{"ed25519:a_ZnZh":"LmPnml6iM0iR..."}}} -""", - formatter_class=argparse.RawDescriptionHelpFormatter, - ) - - parser.add_argument( - "-N", - "--server-name", - help="Name to give as the local homeserver. If unspecified, will be " - "read from the config file.", - ) - - parser.add_argument( - "-k", - "--signing-key-path", - help="Path to the file containing the private ed25519 key to sign the " - "request with.", - ) - - parser.add_argument( - "-K", - "--signing-key", - help="The private ed25519 key to sign the request with.", - ) - - parser.add_argument( - "-c", - "--config", - default="homeserver.yaml", - help=( - "Path to synapse config file, from which the server name and/or signing " - "key path will be read. Ignored if --server-name and --signing-key(-path) " - "are both given." - ), - ) - - parser.add_argument( - "--sign-event-room-version", - type=str, - help=( - "Sign the JSON as an event for the given room version, rather than raw JSON. " - "This means that we will add a 'hashes' object, and redact the event before " - "signing." - ), - ) - - input_args = parser.add_mutually_exclusive_group() - - input_args.add_argument("input_data", nargs="?", help="Raw JSON to be signed.") - - input_args.add_argument( - "-i", - "--input", - type=argparse.FileType("r"), - default=sys.stdin, - help=( - "A file from which to read the JSON to be signed. If neither --input nor " - "input_data are given, JSON will be read from stdin." 
- ), - ) - - parser.add_argument( - "-o", - "--output", - type=argparse.FileType("w"), - default=sys.stdout, - help="Where to write the signed JSON. Defaults to stdout.", - ) - - args = parser.parse_args() - - if not args.server_name or not (args.signing_key_path or args.signing_key): - read_args_from_config(args) - - if args.signing_key: - keys = read_signing_keys([args.signing_key]) - else: - with open(args.signing_key_path) as f: - keys = read_signing_keys(f) - - json_to_sign = args.input_data - if json_to_sign is None: - json_to_sign = args.input.read() - - try: - obj = json.loads(json_to_sign) - except JSONDecodeError as e: - print("Unable to parse input as JSON: %s" % e, file=sys.stderr) - sys.exit(1) - - if not isinstance(obj, dict): - print("Input json was not an object", file=sys.stderr) - sys.exit(1) - - if args.sign_event_room_version: - room_version = KNOWN_ROOM_VERSIONS.get(args.sign_event_room_version) - if not room_version: - print( - f"Unknown room version {args.sign_event_room_version}", file=sys.stderr - ) - sys.exit(1) - add_hashes_and_signatures(room_version, obj, args.server_name, keys[0]) - else: - sign_json(obj, args.server_name, keys[0]) - - for c in json_encoder.iterencode(obj): - args.output.write(c) - args.output.write("\n") - - -def read_args_from_config(args: argparse.Namespace) -> None: - with open(args.config, "r") as fh: - config = yaml.safe_load(fh) - if not args.server_name: - args.server_name = config["server_name"] - if not args.signing_key_path and not args.signing_key: - if "signing_key" in config: - args.signing_key = config["signing_key"] - elif "signing_key_path" in config: - args.signing_key_path = config["signing_key_path"] - else: - print( - "A signing key must be given on the commandline or in the config file.", - file=sys.stderr, - ) - sys.exit(1) - - -if __name__ == "__main__": - main() diff --git a/scripts-dev/sign_json.py b/scripts-dev/sign_json.py new file mode 100755 index 0000000000..9459543106 --- /dev/null +++ b/scripts-dev/sign_json.py @@ -0,0 +1,166 @@ +#!/usr/bin/env python +# +# Copyright 2020 The Matrix.org Foundation C.I.C. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +import argparse +import json +import sys +from json import JSONDecodeError + +import yaml +from signedjson.key import read_signing_keys +from signedjson.sign import sign_json + +from synapse.api.room_versions import KNOWN_ROOM_VERSIONS +from synapse.crypto.event_signing import add_hashes_and_signatures +from synapse.util import json_encoder + + +def main(): + parser = argparse.ArgumentParser( + description="""Adds a signature to a JSON object. + +Example usage: + + $ scripts-dev/sign_json.py -N test -k localhost.signing.key "{}" + {"signatures":{"test":{"ed25519:a_ZnZh":"LmPnml6iM0iR..."}}} +""", + formatter_class=argparse.RawDescriptionHelpFormatter, + ) + + parser.add_argument( + "-N", + "--server-name", + help="Name to give as the local homeserver. 
If unspecified, will be " + "read from the config file.", + ) + + parser.add_argument( + "-k", + "--signing-key-path", + help="Path to the file containing the private ed25519 key to sign the " + "request with.", + ) + + parser.add_argument( + "-K", + "--signing-key", + help="The private ed25519 key to sign the request with.", + ) + + parser.add_argument( + "-c", + "--config", + default="homeserver.yaml", + help=( + "Path to synapse config file, from which the server name and/or signing " + "key path will be read. Ignored if --server-name and --signing-key(-path) " + "are both given." + ), + ) + + parser.add_argument( + "--sign-event-room-version", + type=str, + help=( + "Sign the JSON as an event for the given room version, rather than raw JSON. " + "This means that we will add a 'hashes' object, and redact the event before " + "signing." + ), + ) + + input_args = parser.add_mutually_exclusive_group() + + input_args.add_argument("input_data", nargs="?", help="Raw JSON to be signed.") + + input_args.add_argument( + "-i", + "--input", + type=argparse.FileType("r"), + default=sys.stdin, + help=( + "A file from which to read the JSON to be signed. If neither --input nor " + "input_data are given, JSON will be read from stdin." + ), + ) + + parser.add_argument( + "-o", + "--output", + type=argparse.FileType("w"), + default=sys.stdout, + help="Where to write the signed JSON. Defaults to stdout.", + ) + + args = parser.parse_args() + + if not args.server_name or not (args.signing_key_path or args.signing_key): + read_args_from_config(args) + + if args.signing_key: + keys = read_signing_keys([args.signing_key]) + else: + with open(args.signing_key_path) as f: + keys = read_signing_keys(f) + + json_to_sign = args.input_data + if json_to_sign is None: + json_to_sign = args.input.read() + + try: + obj = json.loads(json_to_sign) + except JSONDecodeError as e: + print("Unable to parse input as JSON: %s" % e, file=sys.stderr) + sys.exit(1) + + if not isinstance(obj, dict): + print("Input json was not an object", file=sys.stderr) + sys.exit(1) + + if args.sign_event_room_version: + room_version = KNOWN_ROOM_VERSIONS.get(args.sign_event_room_version) + if not room_version: + print( + f"Unknown room version {args.sign_event_room_version}", file=sys.stderr + ) + sys.exit(1) + add_hashes_and_signatures(room_version, obj, args.server_name, keys[0]) + else: + sign_json(obj, args.server_name, keys[0]) + + for c in json_encoder.iterencode(obj): + args.output.write(c) + args.output.write("\n") + + +def read_args_from_config(args: argparse.Namespace) -> None: + with open(args.config, "r") as fh: + config = yaml.safe_load(fh) + if not args.server_name: + args.server_name = config["server_name"] + if not args.signing_key_path and not args.signing_key: + if "signing_key" in config: + args.signing_key = config["signing_key"] + elif "signing_key_path" in config: + args.signing_key_path = config["signing_key_path"] + else: + print( + "A signing key must be given on the commandline or in the config file.", + file=sys.stderr, + ) + sys.exit(1) + + +if __name__ == "__main__": + main() diff --git a/tox.ini b/tox.ini index 8d6aa7580b..f4829200cc 100644 --- a/tox.ini +++ b/tox.ini @@ -40,8 +40,6 @@ lint_targets = tests # annoyingly, black doesn't find these so we have to list them scripts-dev - scripts-dev/build_debian_packages - scripts-dev/sign_json stubs contrib synctl -- cgit 1.5.1 From 31b125ccec75e708b09f40205c8cfe692edfa6b4 Mon Sep 17 00:00:00 2001 From: Eric Eastwood Date: Thu, 3 Mar 2022 04:45:23 -0600 Subject: 
Enable MSC3030 Complement tests in Synapse (#12144) The Complement tests for MSC3030 are now merged, https://github.com/matrix-org/complement/pull/178 Synapse implmentation: https://github.com/matrix-org/synapse/pull/9445 --- .github/workflows/tests.yml | 2 +- changelog.d/12144.misc | 1 + scripts-dev/complement.sh | 2 +- 3 files changed, 3 insertions(+), 2 deletions(-) create mode 100644 changelog.d/12144.misc (limited to 'scripts-dev') diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index 3f4e44ca59..fa9611d42b 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -376,7 +376,7 @@ jobs: # Run Complement - run: | set -o pipefail - go test -v -json -p 1 -tags synapse_blacklist,msc2403 ./tests/... 2>&1 | gotestfmt + go test -v -json -p 1 -tags synapse_blacklist,msc2403,msc3030 ./tests/... 2>&1 | gotestfmt shell: bash name: Run Complement Tests env: diff --git a/changelog.d/12144.misc b/changelog.d/12144.misc new file mode 100644 index 0000000000..d8f71bb203 --- /dev/null +++ b/changelog.d/12144.misc @@ -0,0 +1 @@ +Enable [MSC3030](https://github.com/matrix-org/matrix-doc/pull/3030) Complement tests in CI. diff --git a/scripts-dev/complement.sh b/scripts-dev/complement.sh index 0aecb3daf1..e3d3e0f293 100755 --- a/scripts-dev/complement.sh +++ b/scripts-dev/complement.sh @@ -71,4 +71,4 @@ fi # Run the tests! echo "Images built; running complement" -go test -v -tags synapse_blacklist,msc2403 -count=1 $EXTRA_COMPLEMENT_ARGS ./tests/... +go test -v -tags synapse_blacklist,msc2403,msc3030 -count=1 $EXTRA_COMPLEMENT_ARGS ./tests/... -- cgit 1.5.1 From a511a890d7c556ad357d27443e5665e6cc25e0b5 Mon Sep 17 00:00:00 2001 From: Eric Eastwood Date: Thu, 3 Mar 2022 05:19:20 -0600 Subject: Enable MSC2716 Complement tests in Synapse (#12145) Co-authored-by: Brendan Abolivier --- .github/workflows/tests.yml | 2 +- changelog.d/12145.misc | 1 + scripts-dev/complement.sh | 2 +- 3 files changed, 3 insertions(+), 2 deletions(-) create mode 100644 changelog.d/12145.misc (limited to 'scripts-dev') diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index fa9611d42b..c89c50cd07 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -376,7 +376,7 @@ jobs: # Run Complement - run: | set -o pipefail - go test -v -json -p 1 -tags synapse_blacklist,msc2403,msc3030 ./tests/... 2>&1 | gotestfmt + go test -v -json -p 1 -tags synapse_blacklist,msc2403,msc2716,msc3030 ./tests/... 2>&1 | gotestfmt shell: bash name: Run Complement Tests env: diff --git a/changelog.d/12145.misc b/changelog.d/12145.misc new file mode 100644 index 0000000000..4092a2d66e --- /dev/null +++ b/changelog.d/12145.misc @@ -0,0 +1 @@ +Enable [MSC2716](https://github.com/matrix-org/matrix-doc/pull/2716) Complement tests in CI. diff --git a/scripts-dev/complement.sh b/scripts-dev/complement.sh index e3d3e0f293..0a79a4063f 100755 --- a/scripts-dev/complement.sh +++ b/scripts-dev/complement.sh @@ -71,4 +71,4 @@ fi # Run the tests! echo "Images built; running complement" -go test -v -tags synapse_blacklist,msc2403,msc3030 -count=1 $EXTRA_COMPLEMENT_ARGS ./tests/... +go test -v -tags synapse_blacklist,msc2403,msc2716,msc3030 -count=1 $EXTRA_COMPLEMENT_ARGS ./tests/... 
-- cgit 1.5.1 From 4aeb00ca20a0d9dbb2a104591aca081c723eb6d9 Mon Sep 17 00:00:00 2001 From: David Robertson Date: Fri, 4 Mar 2022 11:58:49 +0000 Subject: Move synctl into `synapse._scripts` and expose as an entrypoint (#12140) --- .dockerignore | 1 - MANIFEST.in | 1 - changelog.d/12140.misc | 1 + docker/Dockerfile | 2 +- docs/postgres.md | 8 +- docs/turn-howto.md | 5 +- docs/upgrade.md | 23 ++- scripts-dev/lint.sh | 2 +- setup.py | 2 +- synapse/_scripts/synctl.py | 360 +++++++++++++++++++++++++++++++++++++++++++++ synctl | 360 --------------------------------------------- tox.ini | 1 - 12 files changed, 393 insertions(+), 373 deletions(-) create mode 100644 changelog.d/12140.misc create mode 100755 synapse/_scripts/synctl.py delete mode 100755 synctl (limited to 'scripts-dev') diff --git a/.dockerignore b/.dockerignore index 617f701597..434231fce9 100644 --- a/.dockerignore +++ b/.dockerignore @@ -7,6 +7,5 @@ !MANIFEST.in !README.rst !setup.py -!synctl **/__pycache__ diff --git a/MANIFEST.in b/MANIFEST.in index f1e295e583..d744c090ac 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -1,4 +1,3 @@ -include synctl include LICENSE include VERSION include *.rst diff --git a/changelog.d/12140.misc b/changelog.d/12140.misc new file mode 100644 index 0000000000..33a21a29f0 --- /dev/null +++ b/changelog.d/12140.misc @@ -0,0 +1 @@ +Move `synctl` into `synapse._scripts` and expose as an entry point. \ No newline at end of file diff --git a/docker/Dockerfile b/docker/Dockerfile index 327275a9ca..24b5515eb9 100644 --- a/docker/Dockerfile +++ b/docker/Dockerfile @@ -46,7 +46,7 @@ RUN \ && rm -rf /var/lib/apt/lists/* # Copy just what we need to pip install -COPY MANIFEST.in README.rst setup.py synctl /synapse/ +COPY MANIFEST.in README.rst setup.py /synapse/ COPY synapse/__init__.py /synapse/synapse/__init__.py COPY synapse/python_dependencies.py /synapse/synapse/python_dependencies.py diff --git a/docs/postgres.md b/docs/postgres.md index 0562021da5..de4e2ba4b7 100644 --- a/docs/postgres.md +++ b/docs/postgres.md @@ -153,9 +153,9 @@ database file (typically `homeserver.db`) to another location. Once the copy is complete, restart synapse. For instance: ```sh -./synctl stop +synctl stop cp homeserver.db homeserver.db.snapshot -./synctl start +synctl start ``` Copy the old config file into a new config file: @@ -192,10 +192,10 @@ Once that has completed, change the synapse config to point at the PostgreSQL database configuration file `homeserver-postgres.yaml`: ```sh -./synctl stop +synctl stop mv homeserver.yaml homeserver-old-sqlite.yaml mv homeserver-postgres.yaml homeserver.yaml -./synctl start +synctl start ``` Synapse should now be running against PostgreSQL. diff --git a/docs/turn-howto.md b/docs/turn-howto.md index eba7ca6124..3a2cd04e36 100644 --- a/docs/turn-howto.md +++ b/docs/turn-howto.md @@ -238,8 +238,9 @@ After updating the homeserver configuration, you must restart synapse: * If you use synctl: ```sh - cd /where/you/run/synapse - ./synctl restart + # Depending on how Synapse is installed, synctl may already be on + # your PATH. If not, you may need to activate a virtual environment. + synctl restart ``` * If you use systemd: ```sh diff --git a/docs/upgrade.md b/docs/upgrade.md index f9be3ac6bc..0d0bb066ee 100644 --- a/docs/upgrade.md +++ b/docs/upgrade.md @@ -47,7 +47,7 @@ this document. 3. 
Restart Synapse: ```bash - ./synctl restart + synctl restart ``` To check whether your update was successful, you can check the running @@ -85,6 +85,27 @@ process, for example: dpkg -i matrix-synapse-py3_1.3.0+stretch1_amd64.deb ``` +# Upgrading to v1.55.0 + +## `synctl` script has been moved + +The `synctl` script +[has been made](https://github.com/matrix-org/synapse/pull/12140) an +[entry point](https://packaging.python.org/en/latest/specifications/entry-points/) +and no longer exists at the root of Synapse's source tree. If you wish to use +`synctl` to manage your homeserver, you should invoke `synctl` directly, e.g. +`synctl start` instead of `./synctl start` or `/path/to/synctl start`. + +You will need to ensure `synctl` is on your `PATH`. + - This is automatically the case when using + [Debian packages](https://packages.matrix.org/debian/) or + [docker images](https://hub.docker.com/r/matrixdotorg/synapse) + provided by Matrix.org. + - When installing from a wheel, sdist, or PyPI, a `synctl` executable is added + to your Python installation's `bin`. This should be on your `PATH` + automatically, though you might need to activate a virtual environment + depending on how you installed Synapse. + # Upgrading to v1.54.0 ## Legacy structured logging configuration removal diff --git a/scripts-dev/lint.sh b/scripts-dev/lint.sh index 2f5f2c3566..c063fafa97 100755 --- a/scripts-dev/lint.sh +++ b/scripts-dev/lint.sh @@ -85,7 +85,7 @@ else "synapse" "docker" "tests" # annoyingly, black doesn't find these so we have to list them "scripts-dev" - "contrib" "synctl" "setup.py" "synmark" "stubs" ".ci" + "contrib" "setup.py" "synmark" "stubs" ".ci" ) fi fi diff --git a/setup.py b/setup.py index 318df16766..439ed75d72 100755 --- a/setup.py +++ b/setup.py @@ -155,6 +155,7 @@ setup( # Application "synapse_homeserver = synapse.app.homeserver:main", "synapse_worker = synapse.app.generic_worker:main", + "synctl = synapse._scripts.synctl:main", # Scripts "export_signing_key = synapse._scripts.export_signing_key:main", "generate_config = synapse._scripts.generate_config:main", @@ -177,6 +178,5 @@ setup( "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", ], - scripts=["synctl"], cmdclass={"test": TestCommand}, ) diff --git a/synapse/_scripts/synctl.py b/synapse/_scripts/synctl.py new file mode 100755 index 0000000000..1ab36949c7 --- /dev/null +++ b/synapse/_scripts/synctl.py @@ -0,0 +1,360 @@ +#!/usr/bin/env python +# Copyright 2014-2016 OpenMarket Ltd +# Copyright 2018 New Vector Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import argparse +import collections +import errno +import glob +import os +import os.path +import signal +import subprocess +import sys +import time +from typing import Iterable, Optional + +import yaml + +from synapse.config import find_config_files + +MAIN_PROCESS = "synapse.app.homeserver" + +GREEN = "\x1b[1;32m" +YELLOW = "\x1b[1;33m" +RED = "\x1b[1;31m" +NORMAL = "\x1b[m" + +SYNCTL_CACHE_FACTOR_WARNING = """\ +Setting 'synctl_cache_factor' in the config is deprecated. Instead, please do +one of the following: + - Either set the environment variable 'SYNAPSE_CACHE_FACTOR' + - or set 'caches.global_factor' in the homeserver config. +--------------------------------------------------------------------------------""" + + +def pid_running(pid): + try: + os.kill(pid, 0) + except OSError as err: + if err.errno == errno.EPERM: + pass # process exists + else: + return False + + # When running in a container, orphan processes may not get reaped and their + # PIDs may remain valid. Try to work around the issue. + try: + with open(f"/proc/{pid}/status") as status_file: + if "zombie" in status_file.read(): + return False + except Exception: + # This isn't Linux or `/proc/` is unavailable. + # Assume that the process is still running. + pass + + return True + + +def write(message, colour=NORMAL, stream=sys.stdout): + # Lets check if we're writing to a TTY before colouring + should_colour = False + try: + should_colour = stream.isatty() + except AttributeError: + # Just in case `isatty` isn't defined on everything. The python + # docs are incredibly vague. + pass + + if not should_colour: + stream.write(message + "\n") + else: + stream.write(colour + message + NORMAL + "\n") + + +def abort(message, colour=RED, stream=sys.stderr): + write(message, colour, stream) + sys.exit(1) + + +def start(pidfile: str, app: str, config_files: Iterable[str], daemonize: bool) -> bool: + """Attempts to start a synapse main or worker process. + Args: + pidfile: the pidfile we expect the process to create + app: the python module to run + config_files: config files to pass to synapse + daemonize: if True, will include a --daemonize argument to synapse + + Returns: + True if the process started successfully or was already running + False if there was an error starting the process + """ + + if os.path.exists(pidfile) and pid_running(int(open(pidfile).read())): + print(app + " already running") + return True + + args = [sys.executable, "-m", app] + for c in config_files: + args += ["-c", c] + if daemonize: + args.append("--daemonize") + + try: + subprocess.check_call(args) + write("started %s(%s)" % (app, ",".join(config_files)), colour=GREEN) + return True + except subprocess.CalledProcessError as e: + err = "%s(%s) failed to start (exit code: %d). Check the Synapse logfile" % ( + app, + ",".join(config_files), + e.returncode, + ) + if daemonize: + err += ", or run synctl with --no-daemonize" + err += "." + write(err, colour=RED, stream=sys.stderr) + return False + + +def stop(pidfile: str, app: str) -> Optional[int]: + """Attempts to kill a synapse worker from the pidfile. 
+ Args: + pidfile: path to file containing worker's pid + app: name of the worker's appservice + + Returns: + process id, or None if the process was not running + """ + + if os.path.exists(pidfile): + pid = int(open(pidfile).read()) + try: + os.kill(pid, signal.SIGTERM) + write("stopped %s" % (app,), colour=GREEN) + return pid + except OSError as err: + if err.errno == errno.ESRCH: + write("%s not running" % (app,), colour=YELLOW) + elif err.errno == errno.EPERM: + abort("Cannot stop %s: Operation not permitted" % (app,)) + else: + abort("Cannot stop %s: Unknown error" % (app,)) + else: + write( + "No running worker of %s found (from %s)\nThe process might be managed by another controller (e.g. systemd)" + % (app, pidfile), + colour=YELLOW, + ) + return None + + +Worker = collections.namedtuple( + "Worker", ["app", "configfile", "pidfile", "cache_factor", "cache_factors"] +) + + +def main(): + + parser = argparse.ArgumentParser() + + parser.add_argument( + "action", + choices=["start", "stop", "restart"], + help="whether to start, stop or restart the synapse", + ) + parser.add_argument( + "configfile", + nargs="?", + default="homeserver.yaml", + help="the homeserver config file. Defaults to homeserver.yaml. May also be" + " a directory with *.yaml files", + ) + parser.add_argument( + "-w", "--worker", metavar="WORKERCONFIG", help="start or stop a single worker" + ) + parser.add_argument( + "-a", + "--all-processes", + metavar="WORKERCONFIGDIR", + help="start or stop all the workers in the given directory" + " and the main synapse process", + ) + parser.add_argument( + "--no-daemonize", + action="store_false", + dest="daemonize", + help="Run synapse in the foreground for debugging. " + "Will work only if the daemonize option is not set in the config.", + ) + + options = parser.parse_args() + + if options.worker and options.all_processes: + write('Cannot use "--worker" with "--all-processes"', stream=sys.stderr) + sys.exit(1) + if not options.daemonize and options.all_processes: + write('Cannot use "--no-daemonize" with "--all-processes"', stream=sys.stderr) + sys.exit(1) + + configfile = options.configfile + + if not os.path.exists(configfile): + write( + f"Config file {configfile} does not exist.\n" + f"To generate a config file, run:\n" + f" {sys.executable} -m {MAIN_PROCESS}" + f" -c {configfile} --generate-config" + f" --server-name= --report-stats=\n", + stream=sys.stderr, + ) + sys.exit(1) + + config_files = find_config_files([configfile]) + config = {} + for config_file in config_files: + with open(config_file) as file_stream: + yaml_config = yaml.safe_load(file_stream) + if yaml_config is not None: + config.update(yaml_config) + + pidfile = config["pid_file"] + cache_factor = config.get("synctl_cache_factor") + start_stop_synapse = True + + if cache_factor: + write(SYNCTL_CACHE_FACTOR_WARNING) + os.environ["SYNAPSE_CACHE_FACTOR"] = str(cache_factor) + + cache_factors = config.get("synctl_cache_factors", {}) + for cache_name, factor in cache_factors.items(): + os.environ["SYNAPSE_CACHE_FACTOR_" + cache_name.upper()] = str(factor) + + worker_configfiles = [] + if options.worker: + start_stop_synapse = False + worker_configfile = options.worker + if not os.path.exists(worker_configfile): + write( + "No worker config found at %r" % (worker_configfile,), stream=sys.stderr + ) + sys.exit(1) + worker_configfiles.append(worker_configfile) + + if options.all_processes: + # To start the main synapse with -a you need to add a worker file + # with worker_app == "synapse.app.homeserver" + 
start_stop_synapse = False + worker_configdir = options.all_processes + if not os.path.isdir(worker_configdir): + write( + "No worker config directory found at %r" % (worker_configdir,), + stream=sys.stderr, + ) + sys.exit(1) + worker_configfiles.extend( + sorted(glob.glob(os.path.join(worker_configdir, "*.yaml"))) + ) + + workers = [] + for worker_configfile in worker_configfiles: + with open(worker_configfile) as stream: + worker_config = yaml.safe_load(stream) + worker_app = worker_config["worker_app"] + if worker_app == "synapse.app.homeserver": + # We need to special case all of this to pick up options that may + # be set in the main config file or in this worker config file. + worker_pidfile = worker_config.get("pid_file") or pidfile + worker_cache_factor = ( + worker_config.get("synctl_cache_factor") or cache_factor + ) + worker_cache_factors = ( + worker_config.get("synctl_cache_factors") or cache_factors + ) + # The master process doesn't support using worker_* config. + for key in worker_config: + if key == "worker_app": # But we allow worker_app + continue + assert not key.startswith( + "worker_" + ), "Main process cannot use worker_* config" + else: + worker_pidfile = worker_config["worker_pid_file"] + worker_cache_factor = worker_config.get("synctl_cache_factor") + worker_cache_factors = worker_config.get("synctl_cache_factors", {}) + workers.append( + Worker( + worker_app, + worker_configfile, + worker_pidfile, + worker_cache_factor, + worker_cache_factors, + ) + ) + + action = options.action + + if action == "stop" or action == "restart": + running_pids = [] + for worker in workers: + pid = stop(worker.pidfile, worker.app) + if pid is not None: + running_pids.append(pid) + + if start_stop_synapse: + pid = stop(pidfile, MAIN_PROCESS) + if pid is not None: + running_pids.append(pid) + + if len(running_pids) > 0: + write("Waiting for processes to exit...") + for running_pid in running_pids: + while pid_running(running_pid): + time.sleep(0.2) + write("All processes exited") + + if action == "start" or action == "restart": + error = False + if start_stop_synapse: + if not start(pidfile, MAIN_PROCESS, (configfile,), options.daemonize): + error = True + + for worker in workers: + env = os.environ.copy() + + if worker.cache_factor: + os.environ["SYNAPSE_CACHE_FACTOR"] = str(worker.cache_factor) + + for cache_name, factor in worker.cache_factors.items(): + os.environ["SYNAPSE_CACHE_FACTOR_" + cache_name.upper()] = str(factor) + + if not start( + worker.pidfile, + worker.app, + (configfile, worker.configfile), + options.daemonize, + ): + error = True + + # Reset env back to the original + os.environ.clear() + os.environ.update(env) + + if error: + exit(1) + + +if __name__ == "__main__": + main() diff --git a/synctl b/synctl deleted file mode 100755 index 1ab36949c7..0000000000 --- a/synctl +++ /dev/null @@ -1,360 +0,0 @@ -#!/usr/bin/env python -# Copyright 2014-2016 OpenMarket Ltd -# Copyright 2018 New Vector Ltd -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -import argparse -import collections -import errno -import glob -import os -import os.path -import signal -import subprocess -import sys -import time -from typing import Iterable, Optional - -import yaml - -from synapse.config import find_config_files - -MAIN_PROCESS = "synapse.app.homeserver" - -GREEN = "\x1b[1;32m" -YELLOW = "\x1b[1;33m" -RED = "\x1b[1;31m" -NORMAL = "\x1b[m" - -SYNCTL_CACHE_FACTOR_WARNING = """\ -Setting 'synctl_cache_factor' in the config is deprecated. Instead, please do -one of the following: - - Either set the environment variable 'SYNAPSE_CACHE_FACTOR' - - or set 'caches.global_factor' in the homeserver config. ---------------------------------------------------------------------------------""" - - -def pid_running(pid): - try: - os.kill(pid, 0) - except OSError as err: - if err.errno == errno.EPERM: - pass # process exists - else: - return False - - # When running in a container, orphan processes may not get reaped and their - # PIDs may remain valid. Try to work around the issue. - try: - with open(f"/proc/{pid}/status") as status_file: - if "zombie" in status_file.read(): - return False - except Exception: - # This isn't Linux or `/proc/` is unavailable. - # Assume that the process is still running. - pass - - return True - - -def write(message, colour=NORMAL, stream=sys.stdout): - # Lets check if we're writing to a TTY before colouring - should_colour = False - try: - should_colour = stream.isatty() - except AttributeError: - # Just in case `isatty` isn't defined on everything. The python - # docs are incredibly vague. - pass - - if not should_colour: - stream.write(message + "\n") - else: - stream.write(colour + message + NORMAL + "\n") - - -def abort(message, colour=RED, stream=sys.stderr): - write(message, colour, stream) - sys.exit(1) - - -def start(pidfile: str, app: str, config_files: Iterable[str], daemonize: bool) -> bool: - """Attempts to start a synapse main or worker process. - Args: - pidfile: the pidfile we expect the process to create - app: the python module to run - config_files: config files to pass to synapse - daemonize: if True, will include a --daemonize argument to synapse - - Returns: - True if the process started successfully or was already running - False if there was an error starting the process - """ - - if os.path.exists(pidfile) and pid_running(int(open(pidfile).read())): - print(app + " already running") - return True - - args = [sys.executable, "-m", app] - for c in config_files: - args += ["-c", c] - if daemonize: - args.append("--daemonize") - - try: - subprocess.check_call(args) - write("started %s(%s)" % (app, ",".join(config_files)), colour=GREEN) - return True - except subprocess.CalledProcessError as e: - err = "%s(%s) failed to start (exit code: %d). Check the Synapse logfile" % ( - app, - ",".join(config_files), - e.returncode, - ) - if daemonize: - err += ", or run synctl with --no-daemonize" - err += "." - write(err, colour=RED, stream=sys.stderr) - return False - - -def stop(pidfile: str, app: str) -> Optional[int]: - """Attempts to kill a synapse worker from the pidfile. 
-    Args:
-        pidfile: path to file containing worker's pid
-        app: name of the worker's appservice
-
-    Returns:
-        process id, or None if the process was not running
-    """
-
-    if os.path.exists(pidfile):
-        pid = int(open(pidfile).read())
-        try:
-            os.kill(pid, signal.SIGTERM)
-            write("stopped %s" % (app,), colour=GREEN)
-            return pid
-        except OSError as err:
-            if err.errno == errno.ESRCH:
-                write("%s not running" % (app,), colour=YELLOW)
-            elif err.errno == errno.EPERM:
-                abort("Cannot stop %s: Operation not permitted" % (app,))
-            else:
-                abort("Cannot stop %s: Unknown error" % (app,))
-    else:
-        write(
-            "No running worker of %s found (from %s)\nThe process might be managed by another controller (e.g. systemd)"
-            % (app, pidfile),
-            colour=YELLOW,
-        )
-    return None
-
-
-Worker = collections.namedtuple(
-    "Worker", ["app", "configfile", "pidfile", "cache_factor", "cache_factors"]
-)
-
-
-def main():
-
-    parser = argparse.ArgumentParser()
-
-    parser.add_argument(
-        "action",
-        choices=["start", "stop", "restart"],
-        help="whether to start, stop or restart the synapse",
-    )
-    parser.add_argument(
-        "configfile",
-        nargs="?",
-        default="homeserver.yaml",
-        help="the homeserver config file. Defaults to homeserver.yaml. May also be"
-        " a directory with *.yaml files",
-    )
-    parser.add_argument(
-        "-w", "--worker", metavar="WORKERCONFIG", help="start or stop a single worker"
-    )
-    parser.add_argument(
-        "-a",
-        "--all-processes",
-        metavar="WORKERCONFIGDIR",
-        help="start or stop all the workers in the given directory"
-        " and the main synapse process",
-    )
-    parser.add_argument(
-        "--no-daemonize",
-        action="store_false",
-        dest="daemonize",
-        help="Run synapse in the foreground for debugging. "
-        "Will work only if the daemonize option is not set in the config.",
-    )
-
-    options = parser.parse_args()
-
-    if options.worker and options.all_processes:
-        write('Cannot use "--worker" with "--all-processes"', stream=sys.stderr)
-        sys.exit(1)
-    if not options.daemonize and options.all_processes:
-        write('Cannot use "--no-daemonize" with "--all-processes"', stream=sys.stderr)
-        sys.exit(1)
-
-    configfile = options.configfile
-
-    if not os.path.exists(configfile):
-        write(
-            f"Config file {configfile} does not exist.\n"
-            f"To generate a config file, run:\n"
-            f"    {sys.executable} -m {MAIN_PROCESS}"
-            f" -c {configfile} --generate-config"
-            f" --server-name=<server name> --report-stats=<yes/no>\n",
-            stream=sys.stderr,
-        )
-        sys.exit(1)
-
-    config_files = find_config_files([configfile])
-    config = {}
-    for config_file in config_files:
-        with open(config_file) as file_stream:
-            yaml_config = yaml.safe_load(file_stream)
-        if yaml_config is not None:
-            config.update(yaml_config)
-
-    pidfile = config["pid_file"]
-    cache_factor = config.get("synctl_cache_factor")
-    start_stop_synapse = True
-
-    if cache_factor:
-        write(SYNCTL_CACHE_FACTOR_WARNING)
-        os.environ["SYNAPSE_CACHE_FACTOR"] = str(cache_factor)
-
-    cache_factors = config.get("synctl_cache_factors", {})
-    for cache_name, factor in cache_factors.items():
-        os.environ["SYNAPSE_CACHE_FACTOR_" + cache_name.upper()] = str(factor)
-
-    worker_configfiles = []
-    if options.worker:
-        start_stop_synapse = False
-        worker_configfile = options.worker
-        if not os.path.exists(worker_configfile):
-            write(
-                "No worker config found at %r" % (worker_configfile,), stream=sys.stderr
-            )
-            sys.exit(1)
-        worker_configfiles.append(worker_configfile)
-
-    if options.all_processes:
-        # To start the main synapse with -a you need to add a worker file
-        # with worker_app == "synapse.app.homeserver"
-        start_stop_synapse = False
-        worker_configdir = options.all_processes
-        if not os.path.isdir(worker_configdir):
-            write(
-                "No worker config directory found at %r" % (worker_configdir,),
-                stream=sys.stderr,
-            )
-            sys.exit(1)
-        worker_configfiles.extend(
-            sorted(glob.glob(os.path.join(worker_configdir, "*.yaml")))
-        )
-
-    workers = []
-    for worker_configfile in worker_configfiles:
-        with open(worker_configfile) as stream:
-            worker_config = yaml.safe_load(stream)
-        worker_app = worker_config["worker_app"]
-        if worker_app == "synapse.app.homeserver":
-            # We need to special case all of this to pick up options that may
-            # be set in the main config file or in this worker config file.
-            worker_pidfile = worker_config.get("pid_file") or pidfile
-            worker_cache_factor = (
-                worker_config.get("synctl_cache_factor") or cache_factor
-            )
-            worker_cache_factors = (
-                worker_config.get("synctl_cache_factors") or cache_factors
-            )
-            # The master process doesn't support using worker_* config.
-            for key in worker_config:
-                if key == "worker_app":  # But we allow worker_app
-                    continue
-                assert not key.startswith(
-                    "worker_"
-                ), "Main process cannot use worker_* config"
-        else:
-            worker_pidfile = worker_config["worker_pid_file"]
-            worker_cache_factor = worker_config.get("synctl_cache_factor")
-            worker_cache_factors = worker_config.get("synctl_cache_factors", {})
-        workers.append(
-            Worker(
-                worker_app,
-                worker_configfile,
-                worker_pidfile,
-                worker_cache_factor,
-                worker_cache_factors,
-            )
-        )
-
-    action = options.action
-
-    if action == "stop" or action == "restart":
-        running_pids = []
-        for worker in workers:
-            pid = stop(worker.pidfile, worker.app)
-            if pid is not None:
-                running_pids.append(pid)
-
-        if start_stop_synapse:
-            pid = stop(pidfile, MAIN_PROCESS)
-            if pid is not None:
-                running_pids.append(pid)
-
-        if len(running_pids) > 0:
-            write("Waiting for processes to exit...")
-            for running_pid in running_pids:
-                while pid_running(running_pid):
-                    time.sleep(0.2)
-            write("All processes exited")
-
-    if action == "start" or action == "restart":
-        error = False
-        if start_stop_synapse:
-            if not start(pidfile, MAIN_PROCESS, (configfile,), options.daemonize):
-                error = True
-
-        for worker in workers:
-            env = os.environ.copy()
-
-            if worker.cache_factor:
-                os.environ["SYNAPSE_CACHE_FACTOR"] = str(worker.cache_factor)
-
-            for cache_name, factor in worker.cache_factors.items():
-                os.environ["SYNAPSE_CACHE_FACTOR_" + cache_name.upper()] = str(factor)
-
-            if not start(
-                worker.pidfile,
-                worker.app,
-                (configfile, worker.configfile),
-                options.daemonize,
-            ):
-                error = True
-
-            # Reset env back to the original
-            os.environ.clear()
-            os.environ.update(env)
-
-        if error:
-            exit(1)
-
-
-if __name__ == "__main__":
-    main()
diff --git a/tox.ini b/tox.ini
index 04d282a705..f1f96b27ea 100644
--- a/tox.ini
+++ b/tox.ini
@@ -42,7 +42,6 @@ lint_targets =
     scripts-dev
     stubs
     contrib
-    synctl
     synmark
     .ci
     docker
--
cgit 1.5.1

From 12d1f82db213603972d60be3f46f6a36c3c2330f Mon Sep 17 00:00:00 2001
From: David Robertson
Date: Thu, 17 Mar 2022 13:46:05 +0000
Subject: Generate announcement links in release script (#12242)

---
 changelog.d/12242.misc |  1 +
 scripts-dev/release.py | 41 ++++++++++++++++++++++++++++++++++++++++-
 2 files changed, 41 insertions(+), 1 deletion(-)
 create mode 100644 changelog.d/12242.misc

(limited to 'scripts-dev')

diff --git a/changelog.d/12242.misc b/changelog.d/12242.misc
new file mode 100644
index 0000000000..38e7e0f7d1
--- /dev/null
+++ b/changelog.d/12242.misc
@@ -0,0 +1 @@
+Generate announcement links in the release script.
diff --git a/scripts-dev/release.py b/scripts-dev/release.py
index 046453e65f..685fa32b03 100755
--- a/scripts-dev/release.py
+++ b/scripts-dev/release.py
@@ -66,11 +66,15 @@
         ./scripts-dev/release.py tag
 
-        # ... wait for asssets to build ...
+        # ... wait for assets to build ...
 
         ./scripts-dev/release.py publish
 
         ./scripts-dev/release.py upload
 
+        # Optional: generate some nice links for the announcement
+
+        ./scripts-dev/release.py announce
+
     If the env var GH_TOKEN (or GITHUB_TOKEN) is set, or passed into the
     `tag`/`publish` command, then a new draft release will be created/published.
     """
@@ -415,6 +419,41 @@ def upload():
     )
 
 
+@cli.command()
+def announce():
+    """Generate markdown to announce the release."""
+
+    current_version, _, _ = parse_version_from_module()
+    tag_name = f"v{current_version}"
+
+    click.echo(
+        f"""
+Hi everyone. Synapse {current_version} has just been released.
+
+[notes](https://github.com/matrix-org/synapse/releases/tag/{tag_name}) |\
+[docker](https://hub.docker.com/r/matrixdotorg/synapse/tags?name={tag_name}) | \
+[debs](https://packages.matrix.org/debian/) | \
+[pypi](https://pypi.org/project/matrix-synapse/{current_version}/)"""
+    )
+
+    if "rc" in tag_name:
+        click.echo(
+            """
+Announce the RC in
+- #homeowners:matrix.org (Synapse Announcements)
+- #synapse-dev:matrix.org"""
+        )
+    else:
+        click.echo(
+            """
+Announce the release in
+- #homeowners:matrix.org (Synapse Announcements), bumping the version in the topic
+- #synapse:matrix.org (Synapse Admins), bumping the version in the topic
+- #synapse-dev:matrix.org
+- #synapse-package-maintainers:matrix.org"""
+        )
+
+
 def parse_version_from_module() -> Tuple[
     version.Version, redbaron.RedBaron, redbaron.Node
 ]:
--
cgit 1.5.1
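
Taken together, the release flow documented by this commit now reads as follows. This is a sketch only: it assumes a Synapse checkout with the release tooling installed, and that GH_TOKEN (or GITHUB_TOKEN) is exported if draft releases should be created or published.

    ./scripts-dev/release.py tag

    # ... wait for assets to build ...

    ./scripts-dev/release.py publish

    ./scripts-dev/release.py upload

    # Optional: generate some nice links for the announcement

    ./scripts-dev/release.py announce

Note that `announce` only prints markdown to stdout via `click.echo`; nothing is posted automatically, so the output is intended to be pasted by hand into the rooms it lists.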