author     Andrew Morgan <1342360+anoadragon453@users.noreply.github.com>  2020-05-28 15:12:35 +0100
committer  GitHub <noreply@github.com>  2020-05-28 15:12:35 +0100
commit     0aa9449cd412a19550a74d8d6d4b1714746ebba1 (patch)
tree       e5915221b57a9dc9c53aa293a373cd8fc46412c6 /scripts-dev
parent     Fixes an attribute error when using the default display name during registrat... (diff)
parent     Move sql schema delta files to their new location (diff)
download   synapse-0aa9449cd412a19550a74d8d6d4b1714746ebba1.tar.xz
Merge pull request #39 from matrix-org/dinsic-release-v1.12.x
Merge Synapse release v1.12.0 into 'dinsic'

Accompanying Sytest PR: https://github.com/matrix-org/sytest/issues/843
Diffstat (limited to 'scripts-dev')
-rwxr-xr-x  scripts-dev/build_debian_packages  |   2
-rw-r--r--  scripts-dev/check_auth.py          |  58
-rw-r--r--  scripts-dev/check_event_hash.py    |  54
-rw-r--r--  scripts-dev/check_signature.py     |   5
-rwxr-xr-x  scripts-dev/config-lint.sh         |  10
-rw-r--r--  scripts-dev/convert_server_keys.py |   4
-rwxr-xr-x  scripts-dev/definitions.py         |  62
-rwxr-xr-x  scripts-dev/federation_client.py   |  59
-rwxr-xr-x  scripts-dev/generate_sample_config |  10
-rw-r--r--  scripts-dev/hash_history.py        |   4
-rwxr-xr-x  scripts-dev/lint.sh                |  21
-rwxr-xr-x  scripts-dev/list_url_patterns.py   |   4
-rwxr-xr-x  scripts-dev/make_full_schema.sh    | 184
-rw-r--r--  scripts-dev/tail-synapse.py        |   2
-rwxr-xr-x  scripts-dev/update_database        | 101
15 files changed, 411 insertions, 169 deletions
diff --git a/scripts-dev/build_debian_packages b/scripts-dev/build_debian_packages
index 93305ee9b1..84eaec6a95 100755
--- a/scripts-dev/build_debian_packages
+++ b/scripts-dev/build_debian_packages
@@ -20,11 +20,13 @@ from concurrent.futures import ThreadPoolExecutor
 DISTS = (
     "debian:stretch",
     "debian:buster",
+    "debian:bullseye",
     "debian:sid",
     "ubuntu:xenial",
     "ubuntu:bionic",
     "ubuntu:cosmic",
     "ubuntu:disco",
+    "ubuntu:eoan",
 )
 
 DESC = '''\
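Note: build_debian_packages builds the Synapse Debian packages in Docker containers, one per entry in DISTS. Assuming it keeps its existing convention of accepting an optional list of distributions (all of DISTS when none are given), the newly added targets can be exercised on their own:

    $ ./scripts-dev/build_debian_packages debian:bullseye ubuntu:eoan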
diff --git a/scripts-dev/check_auth.py b/scripts-dev/check_auth.py
deleted file mode 100644
index b3d11f49ec..0000000000
--- a/scripts-dev/check_auth.py
+++ /dev/null
@@ -1,58 +0,0 @@
-from __future__ import print_function
-
-import argparse
-import itertools
-import json
-import sys
-
-from mock import Mock
-
-from synapse.api.auth import Auth
-from synapse.events import FrozenEvent
-
-
-def check_auth(auth, auth_chain, events):
-    auth_chain.sort(key=lambda e: e.depth)
-
-    auth_map = {e.event_id: e for e in auth_chain}
-
-    create_events = {}
-    for e in auth_chain:
-        if e.type == "m.room.create":
-            create_events[e.room_id] = e
-
-    for e in itertools.chain(auth_chain, events):
-        auth_events_list = [auth_map[i] for i, _ in e.auth_events]
-
-        auth_events = {(e.type, e.state_key): e for e in auth_events_list}
-
-        auth_events[("m.room.create", "")] = create_events[e.room_id]
-
-        try:
-            auth.check(e, auth_events=auth_events)
-        except Exception as ex:
-            print("Failed:", e.event_id, e.type, e.state_key)
-            print("Auth_events:", auth_events)
-            print(ex)
-            print(json.dumps(e.get_dict(), sort_keys=True, indent=4))
-            # raise
-        print("Success:", e.event_id, e.type, e.state_key)
-
-
-if __name__ == '__main__':
-    parser = argparse.ArgumentParser()
-
-    parser.add_argument(
-        'json', nargs='?', type=argparse.FileType('r'), default=sys.stdin
-    )
-
-    args = parser.parse_args()
-
-    js = json.load(args.json)
-
-    auth = Auth(Mock())
-    check_auth(
-        auth,
-        [FrozenEvent(d) for d in js["auth_chain"]],
-        [FrozenEvent(d) for d in js.get("pdus", [])],
-    )
diff --git a/scripts-dev/check_event_hash.py b/scripts-dev/check_event_hash.py
deleted file mode 100644
index 8535f99697..0000000000
--- a/scripts-dev/check_event_hash.py
+++ /dev/null
@@ -1,54 +0,0 @@
-import argparse
-import hashlib
-import json
-import logging
-import sys
-
-from unpaddedbase64 import encode_base64
-
-from synapse.crypto.event_signing import (
-    check_event_content_hash,
-    compute_event_reference_hash,
-)
-
-
-class dictobj(dict):
-    def __init__(self, *args, **kargs):
-        dict.__init__(self, *args, **kargs)
-        self.__dict__ = self
-
-    def get_dict(self):
-        return dict(self)
-
-    def get_full_dict(self):
-        return dict(self)
-
-    def get_pdu_json(self):
-        return dict(self)
-
-
-def main():
-    parser = argparse.ArgumentParser()
-    parser.add_argument(
-        "input_json", nargs="?", type=argparse.FileType('r'), default=sys.stdin
-    )
-    args = parser.parse_args()
-    logging.basicConfig()
-
-    event_json = dictobj(json.load(args.input_json))
-
-    algorithms = {"sha256": hashlib.sha256}
-
-    for alg_name in event_json.hashes:
-        if check_event_content_hash(event_json, algorithms[alg_name]):
-            print("PASS content hash %s" % (alg_name,))
-        else:
-            print("FAIL content hash %s" % (alg_name,))
-
-    for algorithm in algorithms.values():
-        name, h_bytes = compute_event_reference_hash(event_json, algorithm)
-        print("Reference hash %s: %s" % (name, encode_base64(h_bytes)))
-
-
-if __name__ == "__main__":
-    main()
diff --git a/scripts-dev/check_signature.py b/scripts-dev/check_signature.py
index 612f17ca7f..ecda103cf7 100644
--- a/scripts-dev/check_signature.py
+++ b/scripts-dev/check_signature.py
@@ -1,4 +1,3 @@
-
 import argparse
 import json
 import logging
@@ -40,7 +39,7 @@ def main():
     parser = argparse.ArgumentParser()
     parser.add_argument("signature_name")
     parser.add_argument(
-        "input_json", nargs="?", type=argparse.FileType('r'), default=sys.stdin
+        "input_json", nargs="?", type=argparse.FileType("r"), default=sys.stdin
     )
 
     args = parser.parse_args()
@@ -69,5 +68,5 @@ def main():
         print("FAIL %s" % (key_id,))
 
 
-if __name__ == '__main__':
+if __name__ == "__main__":
     main()
diff --git a/scripts-dev/config-lint.sh b/scripts-dev/config-lint.sh
new file mode 100755
index 0000000000..189ca66535
--- /dev/null
+++ b/scripts-dev/config-lint.sh
@@ -0,0 +1,10 @@
+#!/bin/bash
+# Find linting errors in Synapse's default config file.
+# Exits with 0 if there are no problems, or another code otherwise.
+
+# Fix non-lowercase true/false values
+sed -i.bak -E "s/: +True/: true/g; s/: +False/: false/g;" docs/sample_config.yaml
+rm docs/sample_config.yaml.bak
+
+# Check if anything changed
+git diff --exit-code docs/sample_config.yaml
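Note: the new config-lint.sh reports problems purely through its exit status (via `git diff --exit-code`), so it composes cleanly with `&&`/`||` in CI pipelines. A minimal run from the repository root:

    $ ./scripts-dev/config-lint.sh && echo "sample_config.yaml is clean"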
diff --git a/scripts-dev/convert_server_keys.py b/scripts-dev/convert_server_keys.py
index ac152b5c42..06b4c1e2ff 100644
--- a/scripts-dev/convert_server_keys.py
+++ b/scripts-dev/convert_server_keys.py
@@ -103,7 +103,7 @@ def main():
 
     yaml.safe_dump(result, sys.stdout, default_flow_style=False)
 
-    rows = list(row for server, json in result.items() for row in rows_v2(server, json))
+    rows = [row for server, json in result.items() for row in rows_v2(server, json)]
 
     cursor = connection.cursor()
     cursor.executemany(
@@ -116,5 +116,5 @@ def main():
     connection.commit()
 
 
-if __name__ == '__main__':
+if __name__ == "__main__":
     main()
diff --git a/scripts-dev/definitions.py b/scripts-dev/definitions.py
index 1deb0fe2b7..9eddb6d515 100755
--- a/scripts-dev/definitions.py
+++ b/scripts-dev/definitions.py
@@ -19,10 +19,10 @@ class DefinitionVisitor(ast.NodeVisitor):
         self.names = {}
         self.attrs = set()
         self.definitions = {
-            'def': self.functions,
-            'class': self.classes,
-            'names': self.names,
-            'attrs': self.attrs,
+            "def": self.functions,
+            "class": self.classes,
+            "names": self.names,
+            "attrs": self.attrs,
         }
 
     def visit_Name(self, node):
@@ -47,23 +47,23 @@ class DefinitionVisitor(ast.NodeVisitor):
 
 
 def non_empty(defs):
-    functions = {name: non_empty(f) for name, f in defs['def'].items()}
-    classes = {name: non_empty(f) for name, f in defs['class'].items()}
+    functions = {name: non_empty(f) for name, f in defs["def"].items()}
+    classes = {name: non_empty(f) for name, f in defs["class"].items()}
     result = {}
     if functions:
-        result['def'] = functions
+        result["def"] = functions
     if classes:
-        result['class'] = classes
-    names = defs['names']
+        result["class"] = classes
+    names = defs["names"]
     uses = []
-    for name in names.get('Load', ()):
-        if name not in names.get('Param', ()) and name not in names.get('Store', ()):
+    for name in names.get("Load", ()):
+        if name not in names.get("Param", ()) and name not in names.get("Store", ()):
             uses.append(name)
-    uses.extend(defs['attrs'])
+    uses.extend(defs["attrs"])
     if uses:
-        result['uses'] = uses
-        result['names'] = names
-        result['attrs'] = defs['attrs']
+        result["uses"] = uses
+        result["names"] = names
+        result["attrs"] = defs["attrs"]
     return result
 
 
@@ -81,33 +81,33 @@ def definitions_in_file(filepath):
 
 
 def defined_names(prefix, defs, names):
-    for name, funcs in defs.get('def', {}).items():
-        names.setdefault(name, {'defined': []})['defined'].append(prefix + name)
+    for name, funcs in defs.get("def", {}).items():
+        names.setdefault(name, {"defined": []})["defined"].append(prefix + name)
         defined_names(prefix + name + ".", funcs, names)
 
-    for name, funcs in defs.get('class', {}).items():
-        names.setdefault(name, {'defined': []})['defined'].append(prefix + name)
+    for name, funcs in defs.get("class", {}).items():
+        names.setdefault(name, {"defined": []})["defined"].append(prefix + name)
         defined_names(prefix + name + ".", funcs, names)
 
 
 def used_names(prefix, item, defs, names):
-    for name, funcs in defs.get('def', {}).items():
+    for name, funcs in defs.get("def", {}).items():
         used_names(prefix + name + ".", name, funcs, names)
 
-    for name, funcs in defs.get('class', {}).items():
+    for name, funcs in defs.get("class", {}).items():
         used_names(prefix + name + ".", name, funcs, names)
 
-    path = prefix.rstrip('.')
-    for used in defs.get('uses', ()):
+    path = prefix.rstrip(".")
+    for used in defs.get("uses", ()):
         if used in names:
             if item:
-                names[item].setdefault('uses', []).append(used)
-            names[used].setdefault('used', {}).setdefault(item, []).append(path)
+                names[item].setdefault("uses", []).append(used)
+            names[used].setdefault("used", {}).setdefault(item, []).append(path)
 
 
-if __name__ == '__main__':
+if __name__ == "__main__":
 
-    parser = argparse.ArgumentParser(description='Find definitions.')
+    parser = argparse.ArgumentParser(description="Find definitions.")
     parser.add_argument(
         "--unused", action="store_true", help="Only list unused definitions"
     )
@@ -119,7 +119,7 @@ if __name__ == '__main__':
     )
     parser.add_argument(
         "directories",
-        nargs='+',
+        nargs="+",
         metavar="DIR",
         help="Directories to search for definitions",
     )
@@ -164,7 +164,7 @@ if __name__ == '__main__':
                 continue
             if ignore and any(pattern.match(name) for pattern in ignore):
                 continue
-            if args.unused and definition.get('used'):
+            if args.unused and definition.get("used"):
                 continue
             result[name] = definition
@@ -196,9 +196,9 @@ if __name__ == '__main__':
             continue
         result[name] = definition
 
-    if args.format == 'yaml':
+    if args.format == "yaml":
         yaml.dump(result, sys.stdout, default_flow_style=False)
-    elif args.format == 'dot':
+    elif args.format == "dot":
         print("digraph {")
         for name, entry in result.items():
             print(name)
diff --git a/scripts-dev/federation_client.py b/scripts-dev/federation_client.py
index e0287c8c6c..7c19e405d4 100755
--- a/scripts-dev/federation_client.py
+++ b/scripts-dev/federation_client.py
@@ -21,7 +21,8 @@ import argparse
 import base64
 import json
 import sys
-from urlparse import urlparse, urlunparse
+
+from six.moves.urllib import parse as urlparse
 
 import nacl.signing
 import requests
@@ -62,7 +63,7 @@ def encode_canonical_json(value):
         # Encode code-points outside of ASCII as UTF-8 rather than \u escapes
         ensure_ascii=False,
         # Remove unecessary white space.
-        separators=(',', ':'),
+        separators=(",", ":"),
         # Sort the keys of dictionaries.
         sort_keys=True,
         # Encode the resulting unicode as UTF-8 bytes.
@@ -144,8 +145,8 @@ def request_json(method, origin_name, origin_key, destination, path, content):
     authorization_headers = []
 
     for key, sig in signed_json["signatures"][origin_name].items():
-        header = "X-Matrix origin=%s,key=\"%s\",sig=\"%s\"" % (origin_name, key, sig)
-        authorization_headers.append(bytes(header))
+        header = 'X-Matrix origin=%s,key="%s",sig="%s"' % (origin_name, key, sig)
+        authorization_headers.append(header.encode("ascii"))
         print("Authorization: %s" % header, file=sys.stderr)
 
     dest = "matrix://%s%s" % (destination, path)
@@ -160,11 +161,7 @@ def request_json(method, origin_name, origin_key, destination, path, content):
         headers["Content-Type"] = "application/json"
 
     result = s.request(
-        method=method,
-        url=dest,
-        headers=headers,
-        verify=False,
-        data=content,
+        method=method, url=dest, headers=headers, verify=False, data=content
     )
     sys.stderr.write("Status Code: %d\n" % (result.status_code,))
     return result.json()
@@ -240,18 +237,18 @@ def main():
 
 
 def read_args_from_config(args):
-    with open(args.config, 'r') as fh:
+    with open(args.config, "r") as fh:
         config = yaml.safe_load(fh)
         if not args.server_name:
-            args.server_name = config['server_name']
+            args.server_name = config["server_name"]
         if not args.signing_key_path:
-            args.signing_key_path = config['signing_key_path']
+            args.signing_key_path = config["signing_key_path"]
 
 
 class MatrixConnectionAdapter(HTTPAdapter):
     @staticmethod
-    def lookup(s):
-        if s[-1] == ']':
+    def lookup(s, skip_well_known=False):
+        if s[-1] == "]":
             # ipv6 literal (with no port)
             return s, 8448
 
@@ -263,19 +260,49 @@ class MatrixConnectionAdapter(HTTPAdapter):
                 raise ValueError("Invalid host:port '%s'" % s)
             return out[0], port
 
+        # try a .well-known lookup
+        if not skip_well_known:
+            well_known = MatrixConnectionAdapter.get_well_known(s)
+            if well_known:
+                return MatrixConnectionAdapter.lookup(well_known, skip_well_known=True)
+
         try:
             srv = srvlookup.lookup("matrix", "tcp", s)[0]
             return srv.host, srv.port
         except Exception:
             return s, 8448
 
+    @staticmethod
+    def get_well_known(server_name):
+        uri = "https://%s/.well-known/matrix/server" % (server_name,)
+        print("fetching %s" % (uri,), file=sys.stderr)
+
+        try:
+            resp = requests.get(uri)
+            if resp.status_code != 200:
+                print("%s gave %i" % (uri, resp.status_code), file=sys.stderr)
+                return None
+
+            parsed_well_known = resp.json()
+            if not isinstance(parsed_well_known, dict):
+                raise Exception("not a dict")
+            if "m.server" not in parsed_well_known:
+                raise Exception("Missing key 'm.server'")
+            new_name = parsed_well_known["m.server"]
+            print("well-known lookup gave %s" % (new_name,), file=sys.stderr)
+            return new_name
+
+        except Exception as e:
+            print("Invalid response from %s: %s" % (uri, e), file=sys.stderr)
+            return None
+
     def get_connection(self, url, proxies=None):
-        parsed = urlparse(url)
+        parsed = urlparse.urlparse(url)
 
         (host, port) = self.lookup(parsed.netloc)
         netloc = "%s:%d" % (host, port)
         print("Connecting to %s" % (netloc,), file=sys.stderr)
-        url = urlunparse(
+        url = urlparse.urlunparse(
             ("https", netloc, parsed.path, parsed.params, parsed.query, parsed.fragment)
         )
         return super(MatrixConnectionAdapter, self).get_connection(url, proxies)
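Note: the get_well_known() lookup added above follows server-name delegation as described in the Matrix spec: the target server publishes a small JSON document with an `m.server` key, which is consulted before the SRV fallback. A sketch of the input it consumes (hostnames illustrative):

    $ curl https://example.com/.well-known/matrix/server
    {"m.server": "synapse.example.com:443"}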
diff --git a/scripts-dev/generate_sample_config b/scripts-dev/generate_sample_config
index 5e33b9b549..9cb4630a5c 100755
--- a/scripts-dev/generate_sample_config
+++ b/scripts-dev/generate_sample_config
@@ -7,12 +7,22 @@ set -e
 cd `dirname $0`/..
 
 SAMPLE_CONFIG="docs/sample_config.yaml"
+SAMPLE_LOG_CONFIG="docs/sample_log_config.yaml"
+
+check() {
+    diff -u "$SAMPLE_LOG_CONFIG" <(./scripts/generate_log_config) >/dev/null || return 1
+}
 
 if [ "$1" == "--check" ]; then
     diff -u "$SAMPLE_CONFIG" <(./scripts/generate_config --header-file docs/.sample_config_header.yaml) >/dev/null || {
         echo -e "\e[1m\e[31m$SAMPLE_CONFIG is not up-to-date. Regenerate it with \`scripts-dev/generate_sample_config\`.\e[0m" >&2
         exit 1
     }
+    diff -u "$SAMPLE_LOG_CONFIG" <(./scripts/generate_log_config) >/dev/null || {
+        echo -e "\e[1m\e[31m$SAMPLE_LOG_CONFIG is not up-to-date. Regenerate it with \`scripts-dev/generate_sample_config\`.\e[0m" >&2
+        exit 1
+    }
 else
     ./scripts/generate_config --header-file docs/.sample_config_header.yaml -o "$SAMPLE_CONFIG"
+    ./scripts/generate_log_config -o "$SAMPLE_LOG_CONFIG"
 fi
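Note: with the log config wired in, both sample files are covered by the same two invocations; `--check` exits non-zero when either generated file is stale:

    $ ./scripts-dev/generate_sample_config            # regenerate both sample files
    $ ./scripts-dev/generate_sample_config --check    # verify they are up to date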
diff --git a/scripts-dev/hash_history.py b/scripts-dev/hash_history.py
index 514d80fa60..bf3862a386 100644
--- a/scripts-dev/hash_history.py
+++ b/scripts-dev/hash_history.py
@@ -27,7 +27,7 @@ class Store(object):
         "_store_pdu_reference_hash_txn"
     ]
     _store_prev_pdu_hash_txn = SignatureStore.__dict__["_store_prev_pdu_hash_txn"]
-    _simple_insert_txn = SQLBaseStore.__dict__["_simple_insert_txn"]
+    simple_insert_txn = SQLBaseStore.__dict__["simple_insert_txn"]
 
 
 store = Store()
@@ -79,5 +79,5 @@ def main():
     conn.commit()
 
 
-if __name__ == '__main__':
+if __name__ == "__main__":
     main()
diff --git a/scripts-dev/lint.sh b/scripts-dev/lint.sh
new file mode 100755
index 0000000000..34c4854e11
--- /dev/null
+++ b/scripts-dev/lint.sh
@@ -0,0 +1,21 @@
+#!/bin/sh
+#
+# Runs linting scripts over the local Synapse checkout
+# isort - sorts import statements
+# flake8 - lints and finds mistakes
+# black - opinionated code formatter
+
+set -e
+
+if [ $# -ge 1 ]
+then
+  files=$*
+else
+  files="synapse tests scripts-dev scripts"
+fi
+
+echo "Linting these locations: $files"
+isort -y -rc $files
+flake8 $files
+python3 -m black $files
+./scripts-dev/config-lint.sh
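Note: because lint.sh only falls back to the whole tree when no arguments are given, a quick pre-commit pass over just the files you touched is possible (paths illustrative):

    $ ./scripts-dev/lint.sh synapse/handlers/room.py tests/handlers/test_room.py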
diff --git a/scripts-dev/list_url_patterns.py b/scripts-dev/list_url_patterns.py
index 62e5a07472..26ad7c67f4 100755
--- a/scripts-dev/list_url_patterns.py
+++ b/scripts-dev/list_url_patterns.py
@@ -35,11 +35,11 @@ def find_patterns_in_file(filepath):
         find_patterns_in_code(f.read())
 
 
-parser = argparse.ArgumentParser(description='Find url patterns.')
+parser = argparse.ArgumentParser(description="Find url patterns.")
 
 parser.add_argument(
     "directories",
-    nargs='+',
+    nargs="+",
     metavar="DIR",
     help="Directories to search for definitions",
 )
diff --git a/scripts-dev/make_full_schema.sh b/scripts-dev/make_full_schema.sh
new file mode 100755
index 0000000000..60e8970a35
--- /dev/null
+++ b/scripts-dev/make_full_schema.sh
@@ -0,0 +1,184 @@
+#!/bin/bash
+#
+# This script generates SQL files for creating a brand new Synapse DB with the latest
+# schema, on both SQLite3 and Postgres.
+#
+# It does so by having Synapse generate an up-to-date SQLite DB, then running
+# synapse_port_db to convert it to Postgres. It then dumps the contents of both.
+
+POSTGRES_HOST="localhost"
+POSTGRES_DB_NAME="synapse_full_schema.$$"
+
+SQLITE_FULL_SCHEMA_OUTPUT_FILE="full.sql.sqlite"
+POSTGRES_FULL_SCHEMA_OUTPUT_FILE="full.sql.postgres"
+
+REQUIRED_DEPS=("matrix-synapse" "psycopg2")
+
+usage() {
+  echo
+  echo "Usage: $0 -p <postgres_username> -o <path> [-c] [-n] [-h]"
+  echo
+  echo "-p <postgres_username>"
+  echo "  Username to connect to local postgres instance. The password will be requested"
+  echo "  during script execution."
+  echo "-c"
+  echo "  CI mode. Enables coverage tracking and prints every command that the script runs."
+  echo "-o <path>"
+  echo "  Directory to output full schema files to."
+  echo "-h"
+  echo "  Display this help text."
+}
+
+while getopts "p:co:h" opt; do
+  case $opt in
+    p)
+      POSTGRES_USERNAME=$OPTARG
+      ;;
+    c)
+      # Print all commands that are being executed
+      set -x
+
+      # Modify required dependencies for coverage
+      REQUIRED_DEPS+=("coverage" "coverage-enable-subprocess")
+
+      COVERAGE=1
+      ;;
+    o)
+      command -v realpath > /dev/null || (echo "The -o flag requires the 'realpath' binary to be installed" && exit 1)
+      OUTPUT_DIR="$(realpath "$OPTARG")"
+      ;;
+    h)
+      usage
+      exit
+      ;;
+    \?)
+      echo "ERROR: Invalid option: -$OPTARG" >&2
+      usage
+      exit
+      ;;
+  esac
+done
+
+# Check that required dependencies are installed
+unsatisfied_requirements=()
+for dep in "${REQUIRED_DEPS[@]}"; do
+  pip show "$dep" --quiet || unsatisfied_requirements+=("$dep")
+done
+if [ ${#unsatisfied_requirements} -ne 0 ]; then
+  echo "Please install the following python packages: ${unsatisfied_requirements[*]}"
+  exit 1
+fi
+
+if [ -z "$POSTGRES_USERNAME" ]; then
+  echo "No postgres username supplied"
+  usage
+  exit 1
+fi
+
+if [ -z "$OUTPUT_DIR" ]; then
+  echo "No output directory supplied"
+  usage
+  exit 1
+fi
+
+# Create the output directory if it doesn't exist
+mkdir -p "$OUTPUT_DIR"
+
+read -rsp "Postgres password for '$POSTGRES_USERNAME': " POSTGRES_PASSWORD
+echo ""
+
+# Exit immediately if a command fails
+set -e
+
+# cd to root of the synapse directory
+cd "$(dirname "$0")/.."
+
+# Create temporary SQLite and Postgres homeserver db configs and key file
+TMPDIR=$(mktemp -d)
+KEY_FILE=$TMPDIR/test.signing.key # default Synapse signing key path
+SQLITE_CONFIG=$TMPDIR/sqlite.conf
+SQLITE_DB=$TMPDIR/homeserver.db
+POSTGRES_CONFIG=$TMPDIR/postgres.conf
+
+# Ensure these files are delete on script exit
+trap 'rm -rf $TMPDIR' EXIT
+
+cat > "$SQLITE_CONFIG" <<EOF
+server_name: "test"
+
+signing_key_path: "$KEY_FILE"
+macaroon_secret_key: "abcde"
+
+report_stats: false
+
+database:
+  name: "sqlite3"
+  args:
+    database: "$SQLITE_DB"
+
+# Suppress the key server warning.
+trusted_key_servers: []
+EOF
+
+cat > "$POSTGRES_CONFIG" <<EOF
+server_name: "test"
+
+signing_key_path: "$KEY_FILE"
+macaroon_secret_key: "abcde"
+
+report_stats: false
+
+database:
+  name: "psycopg2"
+  args:
+    user: "$POSTGRES_USERNAME"
+    host: "$POSTGRES_HOST"
+    password: "$POSTGRES_PASSWORD"
+    database: "$POSTGRES_DB_NAME"
+
+# Suppress the key server warning.
+trusted_key_servers: []
+EOF
+
+# Generate the server's signing key.
+echo "Generating SQLite3 db schema..."
+python -m synapse.app.homeserver --generate-keys -c "$SQLITE_CONFIG"
+
+# Make sure the SQLite3 database is using the latest schema and has no pending background update.
+echo "Running db background jobs..."
+scripts-dev/update_database --database-config "$SQLITE_CONFIG"
+
+# Create the PostgreSQL database.
+echo "Creating postgres database..."
+createdb $POSTGRES_DB_NAME
+
+echo "Copying data from SQLite3 to Postgres with synapse_port_db..."
+if [ -z "$COVERAGE" ]; then
+  # No coverage needed
+  scripts/synapse_port_db --sqlite-database "$SQLITE_DB" --postgres-config "$POSTGRES_CONFIG"
+else
+  # Coverage desired
+  coverage run scripts/synapse_port_db --sqlite-database "$SQLITE_DB" --postgres-config "$POSTGRES_CONFIG"
+fi
+
+# Delete schema_version, applied_schema_deltas and applied_module_schemas tables
+# This needs to be done after synapse_port_db is run
+echo "Dropping unwanted db tables..."
+SQL="
+DROP TABLE schema_version;
+DROP TABLE applied_schema_deltas;
+DROP TABLE applied_module_schemas;
+"
+sqlite3 "$SQLITE_DB" <<< "$SQL"
+psql $POSTGRES_DB_NAME -U "$POSTGRES_USERNAME" -w <<< "$SQL"
+
+echo "Dumping SQLite3 schema to '$OUTPUT_DIR/$SQLITE_FULL_SCHEMA_OUTPUT_FILE'..."
+sqlite3 "$SQLITE_DB" ".dump" > "$OUTPUT_DIR/$SQLITE_FULL_SCHEMA_OUTPUT_FILE"
+
+echo "Dumping Postgres schema to '$OUTPUT_DIR/$POSTGRES_FULL_SCHEMA_OUTPUT_FILE'..."
+pg_dump --format=plain --no-tablespaces --no-acl --no-owner $POSTGRES_DB_NAME | sed -e '/^--/d' -e 's/public\.//g' -e '/^SET /d' -e '/^SELECT /d' > "$OUTPUT_DIR/$POSTGRES_FULL_SCHEMA_OUTPUT_FILE"
+
+echo "Cleaning up temporary Postgres database..."
+dropdb $POSTGRES_DB_NAME
+
+echo "Done! Files dumped to: $OUTPUT_DIR"
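Note: putting the new script together, a typical local run (postgres role and output path illustrative) is:

    $ ./scripts-dev/make_full_schema.sh -p postgres -o /tmp/full_schema

It prompts for the postgres password, builds matching SQLite and Postgres databases, and leaves full.sql.sqlite and full.sql.postgres in the output directory.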
diff --git a/scripts-dev/tail-synapse.py b/scripts-dev/tail-synapse.py
index 7c9985d9f0..44e3a6dbf1 100644
--- a/scripts-dev/tail-synapse.py
+++ b/scripts-dev/tail-synapse.py
@@ -63,5 +63,5 @@ def main():
             streams[update.name] = update.position
 
 
-if __name__ == '__main__':
+if __name__ == "__main__":
     main()
diff --git a/scripts-dev/update_database b/scripts-dev/update_database
new file mode 100755
index 0000000000..94aa8758b4
--- /dev/null
+++ b/scripts-dev/update_database
@@ -0,0 +1,101 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# Copyright 2019 The Matrix.org Foundation C.I.C.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import argparse
+import logging
+import sys
+
+import yaml
+
+from twisted.internet import defer, reactor
+
+import synapse
+from synapse.config.homeserver import HomeServerConfig
+from synapse.metrics.background_process_metrics import run_as_background_process
+from synapse.server import HomeServer
+from synapse.storage import DataStore
+from synapse.util.versionstring import get_version_string
+
+logger = logging.getLogger("update_database")
+
+
+class MockHomeserver(HomeServer):
+    DATASTORE_CLASS = DataStore
+
+    def __init__(self, config, **kwargs):
+        super(MockHomeserver, self).__init__(
+            config.server_name, reactor=reactor, config=config, **kwargs
+        )
+
+        self.version_string = "Synapse/" + get_version_string(synapse)
+
+
+if __name__ == "__main__":
+    parser = argparse.ArgumentParser(
+        description=(
+            "Updates a synapse database to the latest schema and runs background updates"
+            " on it."
+        )
+    )
+    parser.add_argument("-v", action="store_true")
+    parser.add_argument(
+        "--database-config",
+        type=argparse.FileType("r"),
+        required=True,
+        help="A database config file for either a SQLite3 database or a PostgreSQL one.",
+    )
+
+    args = parser.parse_args()
+
+    logging_config = {
+        "level": logging.DEBUG if args.v else logging.INFO,
+        "format": "%(asctime)s - %(name)s - %(lineno)d - %(levelname)s - %(message)s",
+    }
+
+    logging.basicConfig(**logging_config)
+
+    # Load, process and sanity-check the config.
+    hs_config = yaml.safe_load(args.database_config)
+
+    if "database" not in hs_config:
+        sys.stderr.write("The configuration file must have a 'database' section.\n")
+        sys.exit(4)
+
+    config = HomeServerConfig()
+    config.parse_config_dict(hs_config, "", "")
+
+    # Instantiate and initialise the homeserver object.
+    hs = MockHomeserver(config)
+
+    # Setup instantiates the store within the homeserver object and updates the
+    # DB.
+    hs.setup()
+    store = hs.get_datastore()
+
+    async def run_background_updates():
+        await store.db.updates.run_background_updates(sleep=False)
+        # Stop the reactor to exit the script once every background update is run.
+        reactor.stop()
+
+    def run():
+        # Apply all background updates on the database.
+        defer.ensureDeferred(
+            run_as_background_process("background_updates", run_background_updates)
+        )
+
+    reactor.callWhenRunning(run)
+
+    reactor.run()
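Note: per its argparse definition above, the new update_database helper takes a config file containing a database section (such as the ones make_full_schema.sh generates) rather than a full production homeserver.yaml; a sketch of an invocation (filename illustrative):

    $ ./scripts-dev/update_database --database-config database_config.yaml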