author    Erik Johnston <erik@matrix.org>  2022-09-14 11:13:37 +0100
committer Erik Johnston <erik@matrix.org>  2022-09-14 11:13:37 +0100
commit    69beef22c2c99a07d678b78a282bfe5fdd2cc6f2 (patch)
tree      3a7f66596d9a9028cdf6ff6186bf80d86dfa5b21 /scripts-dev
parent    fix tests to reflect new reality (diff)
parent    Fix bug in device list caching when remote users leave rooms (#13749) (diff)
download  synapse-69beef22c2c99a07d678b78a282bfe5fdd2cc6f2.tar.xz

Merge remote-tracking branch 'origin/develop' into shay/batch_events
Diffstat (limited to 'scripts-dev')
-rwxr-xr-x  scripts-dev/make_full_schema.sh |  48
-rwxr-xr-x  scripts-dev/release.py          | 146
2 files changed, 158 insertions, 36 deletions
diff --git a/scripts-dev/make_full_schema.sh b/scripts-dev/make_full_schema.sh

index f0e22d4ca2..61394360ce 100755
--- a/scripts-dev/make_full_schema.sh
+++ b/scripts-dev/make_full_schema.sh
@@ -9,8 +9,10 @@ export PGHOST="localhost"
 POSTGRES_DB_NAME="synapse_full_schema.$$"
 
-SQLITE_FULL_SCHEMA_OUTPUT_FILE="full.sql.sqlite"
-POSTGRES_FULL_SCHEMA_OUTPUT_FILE="full.sql.postgres"
+SQLITE_SCHEMA_FILE="schema.sql.sqlite"
+SQLITE_ROWS_FILE="rows.sql.sqlite"
+POSTGRES_SCHEMA_FILE="full.sql.postgres"
+POSTGRES_ROWS_FILE="rows.sql.postgres"
 
 REQUIRED_DEPS=("matrix-synapse" "psycopg2")
 
@@ -22,7 +24,7 @@ usage() {
   echo " Username to connect to local postgres instance. The password will be requested"
   echo " during script execution."
   echo "-c"
-  echo " CI mode. Enables coverage tracking and prints every command that the script runs."
+  echo " CI mode. Prints every command that the script runs."
   echo "-o <path>"
   echo " Directory to output full schema files to."
   echo "-h"
@@ -37,11 +39,6 @@ while getopts "p:co:h" opt; do
     c)
       # Print all commands that are being executed
      set -x
-
-      # Modify required dependencies for coverage
-      REQUIRED_DEPS+=("coverage" "coverage-enable-subprocess")
-
-      COVERAGE=1
       ;;
     o)
       command -v realpath > /dev/null || (echo "The -o flag requires the 'realpath' binary to be installed" && exit 1)
@@ -102,6 +99,7 @@ SQLITE_DB=$TMPDIR/homeserver.db
 POSTGRES_CONFIG=$TMPDIR/postgres.conf
 
 # Ensure these files are delete on script exit
+# TODO: the trap should also drop the temp postgres DB
 trap 'rm -rf $TMPDIR' EXIT
 
 cat > "$SQLITE_CONFIG" <<EOF
@@ -147,48 +145,34 @@ python -m synapse.app.homeserver --generate-keys -c "$SQLITE_CONFIG"
 
 # Make sure the SQLite3 database is using the latest schema and has no pending background update.
 echo "Running db background jobs..."
-synapse/_scripts/update_synapse_database.py --database-config --run-background-updates "$SQLITE_CONFIG"
+synapse/_scripts/update_synapse_database.py --database-config "$SQLITE_CONFIG" --run-background-updates
 
 # Create the PostgreSQL database.
 echo "Creating postgres database..."
 createdb --lc-collate=C --lc-ctype=C --template=template0 "$POSTGRES_DB_NAME"
 
-echo "Copying data from SQLite3 to Postgres with synapse_port_db..."
-if [ -z "$COVERAGE" ]; then
-  # No coverage needed
-  synapse/_scripts/synapse_port_db.py --sqlite-database "$SQLITE_DB" --postgres-config "$POSTGRES_CONFIG"
-else
-  # Coverage desired
-  coverage run synapse/_scripts/synapse_port_db.py --sqlite-database "$SQLITE_DB" --postgres-config "$POSTGRES_CONFIG"
-fi
+echo "Running db background jobs..."
+synapse/_scripts/update_synapse_database.py --database-config "$POSTGRES_CONFIG" --run-background-updates
+
 
 # Delete schema_version, applied_schema_deltas and applied_module_schemas tables
 # Also delete any shadow tables from fts4
-# This needs to be done after synapse_port_db is run
 echo "Dropping unwanted db tables..."
 SQL="
 DROP TABLE schema_version;
 DROP TABLE applied_schema_deltas;
 DROP TABLE applied_module_schemas;
-DROP TABLE event_search_content;
-DROP TABLE event_search_segments;
-DROP TABLE event_search_segdir;
-DROP TABLE event_search_docsize;
-DROP TABLE event_search_stat;
-DROP TABLE user_directory_search_content;
-DROP TABLE user_directory_search_segments;
-DROP TABLE user_directory_search_segdir;
-DROP TABLE user_directory_search_docsize;
-DROP TABLE user_directory_search_stat;
 "
 sqlite3 "$SQLITE_DB" <<< "$SQL"
 psql "$POSTGRES_DB_NAME" -w <<< "$SQL"
 
-echo "Dumping SQLite3 schema to '$OUTPUT_DIR/$SQLITE_FULL_SCHEMA_OUTPUT_FILE'..."
-sqlite3 "$SQLITE_DB" ".dump" > "$OUTPUT_DIR/$SQLITE_FULL_SCHEMA_OUTPUT_FILE"
+echo "Dumping SQLite3 schema to '$OUTPUT_DIR/$SQLITE_SCHEMA_FILE' and '$OUTPUT_DIR/$SQLITE_ROWS_FILE'..."
+sqlite3 "$SQLITE_DB" ".schema --indent" > "$OUTPUT_DIR/$SQLITE_SCHEMA_FILE"
+sqlite3 "$SQLITE_DB" ".dump --data-only --nosys" > "$OUTPUT_DIR/$SQLITE_ROWS_FILE"
 
-echo "Dumping Postgres schema to '$OUTPUT_DIR/$POSTGRES_FULL_SCHEMA_OUTPUT_FILE'..."
-pg_dump --format=plain --no-tablespaces --no-acl --no-owner $POSTGRES_DB_NAME | sed -e '/^--/d' -e 's/public\.//g' -e '/^SET /d' -e '/^SELECT /d' > "$OUTPUT_DIR/$POSTGRES_FULL_SCHEMA_OUTPUT_FILE"
+echo "Dumping Postgres schema to '$OUTPUT_DIR/$POSTGRES_SCHEMA_FILE' and '$OUTPUT_DIR/$POSTGRES_ROWS_FILE'..."
+pg_dump --format=plain --schema-only --no-tablespaces --no-acl --no-owner "$POSTGRES_DB_NAME" | sed -e '/^$/d' -e '/^--/d' -e 's/public\.//g' -e '/^SET /d' -e '/^SELECT /d' > "$OUTPUT_DIR/$POSTGRES_SCHEMA_FILE"
+pg_dump --format=plain --data-only --inserts --no-tablespaces --no-acl --no-owner "$POSTGRES_DB_NAME" | sed -e '/^$/d' -e '/^--/d' -e 's/public\.//g' -e '/^SET /d' -e '/^SELECT /d' > "$OUTPUT_DIR/$POSTGRES_ROWS_FILE"
 
 echo "Cleaning up temporary Postgres database..."
 dropdb $POSTGRES_DB_NAME
diff --git a/scripts-dev/release.py b/scripts-dev/release.py
index 46220c4dd3..6603bc593b 100755
--- a/scripts-dev/release.py
+++ b/scripts-dev/release.py
@@ -18,10 +18,12 @@
 """
 
 import glob
+import json
 import os
 import re
 import subprocess
 import sys
+import time
 import urllib.request
 from os import path
 from tempfile import TemporaryDirectory
@@ -71,18 +73,21 @@ def cli() -> None:
 
         ./scripts-dev/release.py tag
 
-        # ... wait for assets to build ...
+        # wait for assets to build, either manually or with:
+        ./scripts-dev/release.py wait-for-actions
 
         ./scripts-dev/release.py publish
 
         ./scripts-dev/release.py upload
 
-        # Optional: generate some nice links for the announcement
-
         ./scripts-dev/release.py merge-back
 
+        # Optional: generate some nice links for the announcement
         ./scripts-dev/release.py announce
 
+    Alternatively, `./scripts-dev/release.py full` will do all the above
+    as well as guiding you through the manual steps.
+
     If the env var GH_TOKEN (or GITHUB_TOKEN) is set, or passed into the
     `tag`/`publish` command, then a new draft release will be created/published.
     """
@@ -90,6 +95,10 @@ def cli() -> None:
 
 @cli.command()
 def prepare() -> None:
+    _prepare()
+
+
+def _prepare() -> None:
     """Do the initial stages of creating a release, including creating release
     branch, updating changelog and pushing to GitHub.
     """
@@ -284,6 +293,10 @@ def prepare() -> None:
 @cli.command()
 @click.option("--gh-token", envvar=["GH_TOKEN", "GITHUB_TOKEN"])
 def tag(gh_token: Optional[str]) -> None:
+    _tag(gh_token)
+
+
+def _tag(gh_token: Optional[str]) -> None:
     """Tags the release and generates a draft GitHub release"""
 
     # Make sure we're in a git repo.
@@ -374,6 +387,10 @@ def tag(gh_token: Optional[str]) -> None:
 @cli.command()
 @click.option("--gh-token", envvar=["GH_TOKEN", "GITHUB_TOKEN"], required=True)
 def publish(gh_token: str) -> None:
+    _publish(gh_token)
+
+
+def _publish(gh_token: str) -> None:
     """Publish release on GitHub."""
 
     # Make sure we're in a git repo.
@@ -411,6 +428,10 @@ def publish(gh_token: str) -> None:
 
 @cli.command()
 def upload() -> None:
+    _upload()
+
+
+def _upload() -> None:
     """Upload release to pypi."""
 
     current_version = get_package_version()
@@ -480,7 +501,74 @@ def _merge_into(repo: Repo, source: str, target: str) -> None:
 
 
 @cli.command()
+@click.option("--gh-token", envvar=["GH_TOKEN", "GITHUB_TOKEN"], required=False)
+def wait_for_actions(gh_token: Optional[str]) -> None:
+    _wait_for_actions(gh_token)
+
+
+def _wait_for_actions(gh_token: Optional[str]) -> None:
+    # Find out the version and tag name.
+    current_version = get_package_version()
+    tag_name = f"v{current_version}"
+
+    # Authentication is optional on this endpoint,
+    # but use a token if we have one to reduce the chance of being rate-limited.
+    url = f"https://api.github.com/repos/matrix-org/synapse/actions/runs?branch={tag_name}"
+    headers = {"Accept": "application/vnd.github+json"}
+    if gh_token is not None:
+        headers["authorization"] = f"token {gh_token}"
+    req = urllib.request.Request(url, headers=headers)
+
+    time.sleep(10 * 60)
+    while True:
+        time.sleep(5 * 60)
+        response = urllib.request.urlopen(req)
+        resp = json.loads(response.read())
+
+        if len(resp["workflow_runs"]) == 0:
+            continue
+
+        if all(
+            workflow["status"] != "in_progress" for workflow in resp["workflow_runs"]
+        ):
+            success = all(
+                workflow["conclusion"] == "success" for workflow in resp["workflow_runs"]
+            )
+            if success:
+                _notify("Workflows successful. You can now continue the release.")
+            else:
+                _notify("Workflows failed.")
+                click.confirm("Continue anyway?", abort=True)
+
+            break
+
+
+def _notify(message: str) -> None:
+    # Send a bell character. Most terminals will play a sound or show a notification
+    # for this.
+    click.echo(f"\a{message}")
+
+    # Try and run notify-send, but don't raise an Exception if this fails
+    # (This is best-effort)
+    # TODO Support other platforms?
+    subprocess.run(
+        [
+            "notify-send",
+            "--app-name",
+            "Synapse Release Script",
+            "--expire-time",
+            "3600000",
+            message,
+        ]
+    )
+
+
+@cli.command()
 def merge_back() -> None:
+    _merge_back()
+
+
+def _merge_back() -> None:
     """Merge the release branch back into the appropriate branches.
     All branches will be automatically pulled from the remote and the results
     will be pushed to the remote."""
@@ -519,6 +607,10 @@ def merge_back() -> None:
 
 @cli.command()
 def announce() -> None:
+    _announce()
+
+
+def _announce() -> None:
     """Generate markdown to announce the release."""
 
     current_version = get_package_version()
@@ -548,10 +640,56 @@ Announce the release in
 - #homeowners:matrix.org (Synapse Announcements), bumping the version in the topic
 - #synapse:matrix.org (Synapse Admins), bumping the version in the topic
 - #synapse-dev:matrix.org
-- #synapse-package-maintainers:matrix.org"""
+- #synapse-package-maintainers:matrix.org
+
+Ask the designated people to do the blog and tweets."""
     )
 
 
+@cli.command()
+@click.option("--gh-token", envvar=["GH_TOKEN", "GITHUB_TOKEN"], required=True)
+def full(gh_token: str) -> None:
+    click.echo("1. If this is a security release, read the security wiki page.")
+    click.echo("2. Check for any release blockers before proceeding.")
+    click.echo("    https://github.com/matrix-org/synapse/labels/X-Release-Blocker")
+
+    click.confirm("Ready?", abort=True)
+
+    click.echo("\n*** prepare ***")
+    _prepare()
+
+    click.echo("Deploy to matrix.org and ensure that it hasn't fallen over.")
+    click.echo("Remember to silence the alerts to prevent alert spam.")
+    click.confirm("Deployed?", abort=True)
+
+    click.echo("\n*** tag ***")
+    _tag(gh_token)
+
+    click.echo("\n*** wait for actions ***")
+    _wait_for_actions(gh_token)
+
+    click.echo("\n*** publish ***")
+    _publish(gh_token)
+
+    click.echo("\n*** upload ***")
+    _upload()
+
+    click.echo("\n*** merge back ***")
+    _merge_back()
+
+    click.echo("\nUpdate the Debian repository")
+    click.confirm("Started updating Debian repository?", abort=True)
+
+    click.echo("\nWait for all release methods to be ready.")
+    # Docker should be ready because it was done by the workflows earlier
+    # PyPI should be ready because we just ran upload().
+    # TODO Automatically poll until the Debs have made it to packages.matrix.org
+    click.confirm("Debs ready?", abort=True)
+
+    click.echo("\n*** announce ***")
+    _announce()
+
+
 def get_package_version() -> version.Version:
     version_string = subprocess.check_output(["poetry", "version", "--short"]).decode(
         "utf-8"
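For reference, the GitHub Actions poll that the new `wait-for-actions` command
automates can be approximated from a shell with curl and jq; the endpoint and
Accept header come from the diff above, while the tag name v1.68.0 is only an
illustrative placeholder:

    curl -s \
      -H "Accept: application/vnd.github+json" \
      -H "Authorization: token $GH_TOKEN" \
      "https://api.github.com/repos/matrix-org/synapse/actions/runs?branch=v1.68.0" \
      | jq '.workflow_runs[] | {name, status, conclusion}'

A run is finished when its status reaches "completed"; whether it passed is
reported separately in its conclusion field.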