diff --git a/scripts-dev/build_debian_packages b/scripts-dev/build_debian_packages
index 93305ee9b1..e6f4bd1dca 100755
--- a/scripts-dev/build_debian_packages
+++ b/scripts-dev/build_debian_packages
@@ -20,11 +20,12 @@ from concurrent.futures import ThreadPoolExecutor
DISTS = (
"debian:stretch",
"debian:buster",
+ "debian:bullseye",
"debian:sid",
"ubuntu:xenial",
"ubuntu:bionic",
- "ubuntu:cosmic",
- "ubuntu:disco",
+ "ubuntu:eoan",
+ "ubuntu:focal",
)
DESC = '''\
diff --git a/scripts-dev/check-newsfragment b/scripts-dev/check-newsfragment
index 0ec5075e79..98a618f6b2 100755
--- a/scripts-dev/check-newsfragment
+++ b/scripts-dev/check-newsfragment
@@ -7,7 +7,10 @@ set -e
# make sure that origin/develop is up to date
git remote set-branches --add origin develop
-git fetch origin develop
+git fetch -q origin develop
+
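+# Buildkite sets BUILDKITE_PULL_REQUEST to the number of the PR being built (if any).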
+pr="$BUILDKITE_PULL_REQUEST"
# if there are changes in the debian directory, check that the debian changelog
# has been updated
@@ -20,20 +22,30 @@ fi
# if there are changes *outside* the debian directory, check that the
# newsfragments have been updated.
-if git diff --name-only FETCH_HEAD... | grep -qv '^debian/'; then
- tox -e check-newsfragment
+if ! git diff --name-only FETCH_HEAD... | grep -qv '^debian/'; then
+ exit 0
fi
+tox -qe check-newsfragment
+
echo
echo "--------------------------"
echo
-# check that any new newsfiles on this branch end with a full stop.
+matched=0
for f in `git diff --name-only FETCH_HEAD... -- changelog.d`; do
+ # check that any modified newsfiles on this branch end with a full stop.
lastchar=`tr -d '\n' < $f | tail -c 1`
if [ $lastchar != '.' -a $lastchar != '!' ]; then
echo -e "\e[31mERROR: newsfragment $f does not end with a '.' or '!'\e[39m" >&2
exit 1
fi
+
+ # see if this newsfile corresponds to the right PR
+ [[ -n "$pr" && "$f" == changelog.d/"$pr".* ]] && matched=1
done
+if [[ -n "$pr" && "$matched" -eq 0 ]]; then
+ echo -e "\e[31mERROR: Did not find a news fragment with the right number: expected changelog.d/$pr.*.\e[39m" >&2
+ exit 1
+fi
diff --git a/scripts-dev/check_auth.py b/scripts-dev/check_auth.py
deleted file mode 100644
index 2a1c5f39d4..0000000000
--- a/scripts-dev/check_auth.py
+++ /dev/null
@@ -1,58 +0,0 @@
-from __future__ import print_function
-
-import argparse
-import itertools
-import json
-import sys
-
-from mock import Mock
-
-from synapse.api.auth import Auth
-from synapse.events import FrozenEvent
-
-
-def check_auth(auth, auth_chain, events):
- auth_chain.sort(key=lambda e: e.depth)
-
- auth_map = {e.event_id: e for e in auth_chain}
-
- create_events = {}
- for e in auth_chain:
- if e.type == "m.room.create":
- create_events[e.room_id] = e
-
- for e in itertools.chain(auth_chain, events):
- auth_events_list = [auth_map[i] for i, _ in e.auth_events]
-
- auth_events = {(e.type, e.state_key): e for e in auth_events_list}
-
- auth_events[("m.room.create", "")] = create_events[e.room_id]
-
- try:
- auth.check(e, auth_events=auth_events)
- except Exception as ex:
- print("Failed:", e.event_id, e.type, e.state_key)
- print("Auth_events:", auth_events)
- print(ex)
- print(json.dumps(e.get_dict(), sort_keys=True, indent=4))
- # raise
- print("Success:", e.event_id, e.type, e.state_key)
-
-
-if __name__ == "__main__":
- parser = argparse.ArgumentParser()
-
- parser.add_argument(
- "json", nargs="?", type=argparse.FileType("r"), default=sys.stdin
- )
-
- args = parser.parse_args()
-
- js = json.load(args.json)
-
- auth = Auth(Mock())
- check_auth(
- auth,
- [FrozenEvent(d) for d in js["auth_chain"]],
- [FrozenEvent(d) for d in js.get("pdus", [])],
- )
diff --git a/scripts-dev/config-lint.sh b/scripts-dev/config-lint.sh
new file mode 100755
index 0000000000..189ca66535
--- /dev/null
+++ b/scripts-dev/config-lint.sh
@@ -0,0 +1,11 @@
+#!/bin/bash
+# Find linting errors in Synapse's default config file.
+# Exits with 0 if there are no problems, or another code otherwise.
+
+# Fix non-lowercase true/false values
+sed -i.bak -E "s/: +True/: true/g; s/: +False/: false/g;" docs/sample_config.yaml
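+# (the ".bak" suffix keeps "sed -i" portable between GNU and BSD sed; the backup is removed below)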
+rm docs/sample_config.yaml.bak
+
+# Check if anything changed
+git diff --exit-code docs/sample_config.yaml
diff --git a/scripts-dev/convert_server_keys.py b/scripts-dev/convert_server_keys.py
index 179be61c30..961dc59f11 100644
--- a/scripts-dev/convert_server_keys.py
+++ b/scripts-dev/convert_server_keys.py
@@ -3,8 +3,6 @@ import json
import sys
import time
-import six
-
import psycopg2
import yaml
from canonicaljson import encode_canonical_json
@@ -12,10 +10,8 @@ from signedjson.key import read_signing_keys
from signedjson.sign import sign_json
from unpaddedbase64 import encode_base64
-if six.PY2:
- db_type = six.moves.builtins.buffer
-else:
- db_type = memoryview
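+# psycopg2 adapts memoryview values to BYTEA; Python 2's buffer fallback is no longer needed.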
+db_binary_type = memoryview
def select_v1_keys(connection):
@@ -72,7 +67,7 @@ def rows_v2(server, json):
valid_until = json["valid_until_ts"]
key_json = encode_canonical_json(json)
for key_id in json["verify_keys"]:
- yield (server, key_id, "-", valid_until, valid_until, db_type(key_json))
+ yield (server, key_id, "-", valid_until, valid_until, db_binary_type(key_json))
def main():
@@ -103,7 +98,7 @@ def main():
yaml.safe_dump(result, sys.stdout, default_flow_style=False)
- rows = list(row for server, json in result.items() for row in rows_v2(server, json))
+ rows = [row for server, json in result.items() for row in rows_v2(server, json)]
cursor = connection.cursor()
cursor.executemany(
diff --git a/scripts-dev/generate_sample_config b/scripts-dev/generate_sample_config
index 5e33b9b549..9cb4630a5c 100755
--- a/scripts-dev/generate_sample_config
+++ b/scripts-dev/generate_sample_config
@@ -7,12 +7,22 @@ set -e
cd `dirname $0`/..
SAMPLE_CONFIG="docs/sample_config.yaml"
+SAMPLE_LOG_CONFIG="docs/sample_log_config.yaml"
+
+check() {
+ diff -u "$SAMPLE_LOG_CONFIG" <(./scripts/generate_log_config) >/dev/null || return 1
+}
if [ "$1" == "--check" ]; then
diff -u "$SAMPLE_CONFIG" <(./scripts/generate_config --header-file docs/.sample_config_header.yaml) >/dev/null || {
echo -e "\e[1m\e[31m$SAMPLE_CONFIG is not up-to-date. Regenerate it with \`scripts-dev/generate_sample_config\`.\e[0m" >&2
exit 1
}
+ diff -u "$SAMPLE_LOG_CONFIG" <(./scripts/generate_log_config) >/dev/null || {
+ echo -e "\e[1m\e[31m$SAMPLE_LOG_CONFIG is not up-to-date. Regenerate it with \`scripts-dev/generate_sample_config\`.\e[0m" >&2
+ exit 1
+ }
else
./scripts/generate_config --header-file docs/.sample_config_header.yaml -o "$SAMPLE_CONFIG"
+ ./scripts/generate_log_config -o "$SAMPLE_LOG_CONFIG"
fi
diff --git a/scripts-dev/hash_history.py b/scripts-dev/hash_history.py
index d20f6db176..bf3862a386 100644
--- a/scripts-dev/hash_history.py
+++ b/scripts-dev/hash_history.py
@@ -27,7 +27,7 @@ class Store(object):
"_store_pdu_reference_hash_txn"
]
_store_prev_pdu_hash_txn = SignatureStore.__dict__["_store_prev_pdu_hash_txn"]
- _simple_insert_txn = SQLBaseStore.__dict__["_simple_insert_txn"]
+ simple_insert_txn = SQLBaseStore.__dict__["simple_insert_txn"]
store = Store()
diff --git a/scripts-dev/lint.sh b/scripts-dev/lint.sh
index ebb4d69f86..34c4854e11 100755
--- a/scripts-dev/lint.sh
+++ b/scripts-dev/lint.sh
@@ -7,6 +7,16 @@
set -e
-isort -y -rc synapse tests scripts-dev scripts
-flake8 synapse tests
-python3 -m black synapse tests scripts-dev scripts
+if [ $# -ge 1 ]
+then
+ files=$*
+else
+ files="synapse tests scripts-dev scripts"
+fi
+
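+# e.g. "scripts-dev/lint.sh synapse/handlers" (illustrative) lints only that subtree.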
+echo "Linting these locations: $files"
+isort -y -rc $files
+flake8 $files
+python3 -m black $files
+./scripts-dev/config-lint.sh
diff --git a/scripts-dev/make_full_schema.sh b/scripts-dev/make_full_schema.sh
new file mode 100755
index 0000000000..60e8970a35
--- /dev/null
+++ b/scripts-dev/make_full_schema.sh
@@ -0,0 +1,189 @@
+#!/bin/bash
+#
+# This script generates SQL files for creating a brand new Synapse DB with the latest
+# schema, on both SQLite3 and Postgres.
+#
+# It does so by having Synapse generate an up-to-date SQLite DB, then running
+# synapse_port_db to convert it to Postgres. It then dumps the contents of both.
+
+POSTGRES_HOST="localhost"
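+# "$$" is this script's PID, which keeps the scratch database name unique per run.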
+POSTGRES_DB_NAME="synapse_full_schema.$$"
+
+SQLITE_FULL_SCHEMA_OUTPUT_FILE="full.sql.sqlite"
+POSTGRES_FULL_SCHEMA_OUTPUT_FILE="full.sql.postgres"
+
+REQUIRED_DEPS=("matrix-synapse" "psycopg2")
+
+usage() {
+ echo
+ echo "Usage: $0 -p <postgres_username> -o <path> [-c] [-n] [-h]"
+ echo
+ echo "-p <postgres_username>"
+ echo " Username to connect to local postgres instance. The password will be requested"
+ echo " during script execution."
+ echo "-c"
+ echo " CI mode. Enables coverage tracking and prints every command that the script runs."
+ echo "-o <path>"
+ echo " Directory to output full schema files to."
+ echo "-h"
+ echo " Display this help text."
+}
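+
+# Example invocation (illustrative): make_full_schema.sh -p postgres -o /tmp/schema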
+
+while getopts "p:co:h" opt; do
+ case $opt in
+ p)
+ POSTGRES_USERNAME=$OPTARG
+ ;;
+ c)
+ # Print all commands that are being executed
+ set -x
+
+ # Modify required dependencies for coverage
+ REQUIRED_DEPS+=("coverage" "coverage-enable-subprocess")
+
+ COVERAGE=1
+ ;;
+ o)
+      command -v realpath > /dev/null || { echo "The -o flag requires the 'realpath' binary to be installed" >&2; exit 1; }
+ OUTPUT_DIR="$(realpath "$OPTARG")"
+ ;;
+ h)
+ usage
+ exit
+ ;;
+ \?)
+ echo "ERROR: Invalid option: -$OPTARG" >&2
+ usage
+ exit
+ ;;
+ esac
+done
+
+# Check that required dependencies are installed
+unsatisfied_requirements=()
+for dep in "${REQUIRED_DEPS[@]}"; do
+ pip show "$dep" --quiet || unsatisfied_requirements+=("$dep")
+done
+if [ ${#unsatisfied_requirements[@]} -ne 0 ]; then
+ echo "Please install the following python packages: ${unsatisfied_requirements[*]}"
+ exit 1
+fi
+
+if [ -z "$POSTGRES_USERNAME" ]; then
+ echo "No postgres username supplied"
+ usage
+ exit 1
+fi
+
+if [ -z "$OUTPUT_DIR" ]; then
+ echo "No output directory supplied"
+ usage
+ exit 1
+fi
+
+# Create the output directory if it doesn't exist
+mkdir -p "$OUTPUT_DIR"
+
+read -rsp "Postgres password for '$POSTGRES_USERNAME': " POSTGRES_PASSWORD
+echo ""
+
+# Exit immediately if a command fails
+set -e
+
+# cd to root of the synapse directory
+cd "$(dirname "$0")/.."
+
+# Create temporary SQLite and Postgres homeserver db configs and key file
+TMPDIR=$(mktemp -d)
+KEY_FILE=$TMPDIR/test.signing.key # default Synapse signing key path
+SQLITE_CONFIG=$TMPDIR/sqlite.conf
+SQLITE_DB=$TMPDIR/homeserver.db
+POSTGRES_CONFIG=$TMPDIR/postgres.conf
+
+# Ensure these files are deleted on script exit
+trap 'rm -rf $TMPDIR' EXIT
+
+cat > "$SQLITE_CONFIG" <<EOF
+server_name: "test"
+
+signing_key_path: "$KEY_FILE"
+macaroon_secret_key: "abcde"
+
+report_stats: false
+
+database:
+ name: "sqlite3"
+ args:
+ database: "$SQLITE_DB"
+
+# Suppress the key server warning.
+trusted_key_servers: []
+EOF
+
+cat > "$POSTGRES_CONFIG" <<EOF
+server_name: "test"
+
+signing_key_path: "$KEY_FILE"
+macaroon_secret_key: "abcde"
+
+report_stats: false
+
+database:
+ name: "psycopg2"
+ args:
+ user: "$POSTGRES_USERNAME"
+ host: "$POSTGRES_HOST"
+ password: "$POSTGRES_PASSWORD"
+ database: "$POSTGRES_DB_NAME"
+
+# Suppress the key server warning.
+trusted_key_servers: []
+EOF
+
+# Generate the server's signing key.
+echo "Generating SQLite3 db schema..."
+python -m synapse.app.homeserver --generate-keys -c "$SQLITE_CONFIG"
+
+# Make sure the SQLite3 database is using the latest schema and has no pending background update.
+echo "Running db background jobs..."
+scripts-dev/update_database --database-config "$SQLITE_CONFIG"
+
+# Create the PostgreSQL database.
+echo "Creating postgres database..."
+createdb "$POSTGRES_DB_NAME"
+
+echo "Copying data from SQLite3 to Postgres with synapse_port_db..."
+if [ -z "$COVERAGE" ]; then
+ # No coverage needed
+ scripts/synapse_port_db --sqlite-database "$SQLITE_DB" --postgres-config "$POSTGRES_CONFIG"
+else
+ # Coverage desired
+ coverage run scripts/synapse_port_db --sqlite-database "$SQLITE_DB" --postgres-config "$POSTGRES_CONFIG"
+fi
+
+# Delete schema_version, applied_schema_deltas and applied_module_schemas tables
+# This needs to be done after synapse_port_db is run
+echo "Dropping unwanted db tables..."
+SQL="
+DROP TABLE schema_version;
+DROP TABLE applied_schema_deltas;
+DROP TABLE applied_module_schemas;
+"
+sqlite3 "$SQLITE_DB" <<< "$SQL"
+psql "$POSTGRES_DB_NAME" -U "$POSTGRES_USERNAME" -w <<< "$SQL"
+
+echo "Dumping SQLite3 schema to '$OUTPUT_DIR/$SQLITE_FULL_SCHEMA_OUTPUT_FILE'..."
+sqlite3 "$SQLITE_DB" ".dump" > "$OUTPUT_DIR/$SQLITE_FULL_SCHEMA_OUTPUT_FILE"
+
+echo "Dumping Postgres schema to '$OUTPUT_DIR/$POSTGRES_FULL_SCHEMA_OUTPUT_FILE'..."
+pg_dump --format=plain --no-tablespaces --no-acl --no-owner "$POSTGRES_DB_NAME" | sed -e '/^--/d' -e 's/public\.//g' -e '/^SET /d' -e '/^SELECT /d' > "$OUTPUT_DIR/$POSTGRES_FULL_SCHEMA_OUTPUT_FILE"
+
+echo "Cleaning up temporary Postgres database..."
+dropdb "$POSTGRES_DB_NAME"
+
+echo "Done! Files dumped to: $OUTPUT_DIR"
diff --git a/scripts-dev/update_database b/scripts-dev/update_database
new file mode 100755
index 0000000000..94aa8758b4
--- /dev/null
+++ b/scripts-dev/update_database
@@ -0,0 +1,105 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# Copyright 2019 The Matrix.org Foundation C.I.C.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import argparse
+import logging
+import sys
+
+import yaml
+
+from twisted.internet import defer, reactor
+
+import synapse
+from synapse.config.homeserver import HomeServerConfig
+from synapse.metrics.background_process_metrics import run_as_background_process
+from synapse.server import HomeServer
+from synapse.storage import DataStore
+from synapse.util.versionstring import get_version_string
+
+logger = logging.getLogger("update_database")
+
+
+class MockHomeserver(HomeServer):
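+    # Barebones homeserver: just enough to open the database and run background
+    # updates; no listeners are started.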
+ DATASTORE_CLASS = DataStore
+
+ def __init__(self, config, **kwargs):
+ super(MockHomeserver, self).__init__(
+ config.server_name, reactor=reactor, config=config, **kwargs
+ )
+
+        self.version_string = "Synapse/" + get_version_string(synapse)
+
+
+if __name__ == "__main__":
+ parser = argparse.ArgumentParser(
+ description=(
+ "Updates a synapse database to the latest schema and runs background updates"
+ " on it."
+ )
+ )
+ parser.add_argument("-v", action="store_true")
+ parser.add_argument(
+ "--database-config",
+ type=argparse.FileType("r"),
+ required=True,
+ help="A database config file for either a SQLite3 database or a PostgreSQL one.",
+ )
+
+ args = parser.parse_args()
+
+ logging_config = {
+ "level": logging.DEBUG if args.v else logging.INFO,
+ "format": "%(asctime)s - %(name)s - %(lineno)d - %(levelname)s - %(message)s",
+ }
+
+ logging.basicConfig(**logging_config)
+
+ # Load, process and sanity-check the config.
+ hs_config = yaml.safe_load(args.database_config)
+
+ if "database" not in hs_config:
+ sys.stderr.write("The configuration file must have a 'database' section.\n")
+ sys.exit(4)
+
+ config = HomeServerConfig()
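+    # parse_config_dict also takes config_dir_path and data_dir_path arguments,
+    # which are not needed here, hence the empty strings.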
+ config.parse_config_dict(hs_config, "", "")
+
+ # Instantiate and initialise the homeserver object.
+ hs = MockHomeserver(config)
+
+ # Setup instantiates the store within the homeserver object and updates the
+ # DB.
+ hs.setup()
+ store = hs.get_datastore()
+
+ async def run_background_updates():
+ await store.db.updates.run_background_updates(sleep=False)
+ # Stop the reactor to exit the script once every background update is run.
+ reactor.stop()
+
+ def run():
+ # Apply all background updates on the database.
+ defer.ensureDeferred(
+ run_as_background_process("background_updates", run_background_updates)
+ )
+
+ reactor.callWhenRunning(run)
+
+ reactor.run()