Diffstat (limited to 'scripts-dev')
-rwxr-xr-x  scripts-dev/check-newsfragment.sh |   4
-rw-r--r--  scripts-dev/check_signature.py    |  72
-rwxr-xr-x  scripts-dev/complement.sh         |   2
-rwxr-xr-x  scripts-dev/definitions.py        | 208
-rw-r--r--  scripts-dev/hash_history.py       |  81
-rwxr-xr-x  scripts-dev/list_url_patterns.py  |  60
-rw-r--r--  scripts-dev/tail-synapse.py       |  67
-rwxr-xr-x  scripts-dev/test_postgresql.sh    |  19
8 files changed, 3 insertions, 510 deletions
diff --git a/scripts-dev/check-newsfragment.sh b/scripts-dev/check-newsfragment.sh
index 493558ad65..effea0929c 100755
--- a/scripts-dev/check-newsfragment.sh
+++ b/scripts-dev/check-newsfragment.sh
@@ -19,7 +19,7 @@ if ! git diff --quiet FETCH_HEAD... -- debian; then
     if git diff --quiet FETCH_HEAD... -- debian/changelog; then
         echo "Updates to debian directory, but no update to the changelog." >&2
         echo "!! Please see the contributing guide for help writing your changelog entry:" >&2
-        echo "https://github.com/matrix-org/synapse/blob/develop/CONTRIBUTING.md#debian-changelog" >&2
+        echo "https://matrix-org.github.io/synapse/latest/development/contributing_guide.html#debian-changelog" >&2
         exit 1
     fi
 fi
@@ -32,7 +32,7 @@ fi
 # Print a link to the contributing guide if the user makes a mistake
 CONTRIBUTING_GUIDE_TEXT="!! Please see the contributing guide for help writing your changelog entry:
-https://github.com/matrix-org/synapse/blob/develop/CONTRIBUTING.md#changelog"
+https://matrix-org.github.io/synapse/latest/development/contributing_guide.html#changelog"
 
 # If check-newsfragment returns a non-zero exit code, print the contributing guide and exit
 python -m towncrier.check --compare-with=origin/develop || (echo -e "$CONTRIBUTING_GUIDE_TEXT" >&2 && exit 1)
diff --git a/scripts-dev/check_signature.py b/scripts-dev/check_signature.py
deleted file mode 100644
index 6755bc5282..0000000000
--- a/scripts-dev/check_signature.py
+++ /dev/null
@@ -1,72 +0,0 @@
-import argparse
-import json
-import logging
-import sys
-
-import dns.resolver
-import urllib2
-from signedjson.key import decode_verify_key_bytes, write_signing_keys
-from signedjson.sign import verify_signed_json
-from unpaddedbase64 import decode_base64
-
-
-def get_targets(server_name):
-    if ":" in server_name:
-        target, port = server_name.split(":")
-        yield (target, int(port))
-        return
-    try:
-        answers = dns.resolver.query("_matrix._tcp." + server_name, "SRV")
-        for srv in answers:
-            yield (srv.target, srv.port)
-    except dns.resolver.NXDOMAIN:
-        yield (server_name, 8448)
-
-
-def get_server_keys(server_name, target, port):
-    url = "https://%s:%i/_matrix/key/v1" % (target, port)
-    keys = json.load(urllib2.urlopen(url))
-    verify_keys = {}
-    for key_id, key_base64 in keys["verify_keys"].items():
-        verify_key = decode_verify_key_bytes(key_id, decode_base64(key_base64))
-        verify_signed_json(keys, server_name, verify_key)
-        verify_keys[key_id] = verify_key
-    return verify_keys
-
-
-def main():
-
-    parser = argparse.ArgumentParser()
-    parser.add_argument("signature_name")
-    parser.add_argument(
-        "input_json", nargs="?", type=argparse.FileType("r"), default=sys.stdin
-    )
-
-    args = parser.parse_args()
-    logging.basicConfig()
-
-    server_name = args.signature_name
-    keys = {}
-    for target, port in get_targets(server_name):
-        try:
-            keys = get_server_keys(server_name, target, port)
-            print("Using keys from https://%s:%s/_matrix/key/v1" % (target, port))
-            write_signing_keys(sys.stdout, keys.values())
-            break
-        except Exception:
-            logging.exception("Error talking to %s:%s", target, port)
-
-    json_to_check = json.load(args.input_json)
-    print("Checking JSON:")
-    for key_id in json_to_check["signatures"][args.signature_name]:
-        try:
-            key = keys[key_id]
-            verify_signed_json(json_to_check, args.signature_name, key)
-            print("PASS %s" % (key_id,))
-        except Exception:
-            logging.exception("Check for key %s failed" % (key_id,))
-            print("FAIL %s" % (key_id,))
-
-
-if __name__ == "__main__":
-    main()
diff --git a/scripts-dev/complement.sh b/scripts-dev/complement.sh
index 0a79a4063f..d1b59ff040 100755
--- a/scripts-dev/complement.sh
+++ b/scripts-dev/complement.sh
@@ -71,4 +71,4 @@ fi
 
 # Run the tests!
 echo "Images built; running complement"
-go test -v -tags synapse_blacklist,msc2403,msc2716,msc3030 -count=1 $EXTRA_COMPLEMENT_ARGS ./tests/...
+go test -v -tags synapse_blacklist,msc2716,msc3030 -count=1 $EXTRA_COMPLEMENT_ARGS ./tests/...
diff --git a/scripts-dev/definitions.py b/scripts-dev/definitions.py
deleted file mode 100755
index c82ddd9677..0000000000
--- a/scripts-dev/definitions.py
+++ /dev/null
@@ -1,208 +0,0 @@
-#! /usr/bin/python
-
-import argparse
-import ast
-import os
-import re
-import sys
-
-import yaml
-
-
-class DefinitionVisitor(ast.NodeVisitor):
-    def __init__(self):
-        super().__init__()
-        self.functions = {}
-        self.classes = {}
-        self.names = {}
-        self.attrs = set()
-        self.definitions = {
-            "def": self.functions,
-            "class": self.classes,
-            "names": self.names,
-            "attrs": self.attrs,
-        }
-
-    def visit_Name(self, node):
-        self.names.setdefault(type(node.ctx).__name__, set()).add(node.id)
-
-    def visit_Attribute(self, node):
-        self.attrs.add(node.attr)
-        for child in ast.iter_child_nodes(node):
-            self.visit(child)
-
-    def visit_ClassDef(self, node):
-        visitor = DefinitionVisitor()
-        self.classes[node.name] = visitor.definitions
-        for child in ast.iter_child_nodes(node):
-            visitor.visit(child)
-
-    def visit_FunctionDef(self, node):
-        visitor = DefinitionVisitor()
-        self.functions[node.name] = visitor.definitions
-        for child in ast.iter_child_nodes(node):
-            visitor.visit(child)
-
-
-def non_empty(defs):
-    functions = {name: non_empty(f) for name, f in defs["def"].items()}
-    classes = {name: non_empty(f) for name, f in defs["class"].items()}
-    result = {}
-    if functions:
-        result["def"] = functions
-    if classes:
-        result["class"] = classes
-    names = defs["names"]
-    uses = []
-    for name in names.get("Load", ()):
-        if name not in names.get("Param", ()) and name not in names.get("Store", ()):
-            uses.append(name)
-    uses.extend(defs["attrs"])
-    if uses:
-        result["uses"] = uses
-    result["names"] = names
-    result["attrs"] = defs["attrs"]
-    return result
-
-
-def definitions_in_code(input_code):
-    input_ast = ast.parse(input_code)
-    visitor = DefinitionVisitor()
-    visitor.visit(input_ast)
-    definitions = non_empty(visitor.definitions)
-    return definitions
-
-
-def definitions_in_file(filepath):
-    with open(filepath) as f:
-        return definitions_in_code(f.read())
-
-
-def defined_names(prefix, defs, names):
-    for name, funcs in defs.get("def", {}).items():
-        names.setdefault(name, {"defined": []})["defined"].append(prefix + name)
-        defined_names(prefix + name + ".", funcs, names)
-
-    for name, funcs in defs.get("class", {}).items():
-        names.setdefault(name, {"defined": []})["defined"].append(prefix + name)
-        defined_names(prefix + name + ".", funcs, names)
-
-
-def used_names(prefix, item, defs, names):
-    for name, funcs in defs.get("def", {}).items():
-        used_names(prefix + name + ".", name, funcs, names)
-
-    for name, funcs in defs.get("class", {}).items():
-        used_names(prefix + name + ".", name, funcs, names)
-
-    path = prefix.rstrip(".")
-    for used in defs.get("uses", ()):
-        if used in names:
-            if item:
-                names[item].setdefault("uses", []).append(used)
-            names[used].setdefault("used", {}).setdefault(item, []).append(path)
-
-
-if __name__ == "__main__":
-
-    parser = argparse.ArgumentParser(description="Find definitions.")
-    parser.add_argument(
-        "--unused", action="store_true", help="Only list unused definitions"
-    )
-    parser.add_argument(
-        "--ignore", action="append", metavar="REGEXP", help="Ignore a pattern"
-    )
-    parser.add_argument(
-        "--pattern", action="append", metavar="REGEXP", help="Search for a pattern"
-    )
-    parser.add_argument(
-        "directories",
-        nargs="+",
-        metavar="DIR",
-        help="Directories to search for definitions",
-    )
-    parser.add_argument(
-        "--referrers",
-        default=0,
-        type=int,
-        help="Include referrers up to the given depth",
-    )
-    parser.add_argument(
-        "--referred",
-        default=0,
-        type=int,
-        help="Include referred down to the given depth",
-    )
-    parser.add_argument(
-        "--format", default="yaml", help="Output format, one of 'yaml' or 'dot'"
-    )
-    args = parser.parse_args()
-
-    definitions = {}
-    for directory in args.directories:
-        for root, _, files in os.walk(directory):
-            for filename in files:
-                if filename.endswith(".py"):
-                    filepath = os.path.join(root, filename)
-                    definitions[filepath] = definitions_in_file(filepath)
-
-    names = {}
-    for filepath, defs in definitions.items():
-        defined_names(filepath + ":", defs, names)
-
-    for filepath, defs in definitions.items():
-        used_names(filepath + ":", None, defs, names)
-
-    patterns = [re.compile(pattern) for pattern in args.pattern or ()]
-    ignore = [re.compile(pattern) for pattern in args.ignore or ()]
-
-    result = {}
-    for name, definition in names.items():
-        if patterns and not any(pattern.match(name) for pattern in patterns):
-            continue
-        if ignore and any(pattern.match(name) for pattern in ignore):
-            continue
-        if args.unused and definition.get("used"):
-            continue
-        result[name] = definition
-
-    referrer_depth = args.referrers
-    referrers = set()
-    while referrer_depth:
-        referrer_depth -= 1
-        for entry in result.values():
-            for used_by in entry.get("used", ()):
-                referrers.add(used_by)
-        for name, definition in names.items():
-            if name not in referrers:
-                continue
-            if ignore and any(pattern.match(name) for pattern in ignore):
-                continue
-            result[name] = definition
-
-    referred_depth = args.referred
-    referred = set()
-    while referred_depth:
-        referred_depth -= 1
-        for entry in result.values():
-            for uses in entry.get("uses", ()):
-                referred.add(uses)
-        for name, definition in names.items():
-            if name not in referred:
-                continue
-            if ignore and any(pattern.match(name) for pattern in ignore):
-                continue
-            result[name] = definition
-
-    if args.format == "yaml":
-        yaml.dump(result, sys.stdout, default_flow_style=False)
-    elif args.format == "dot":
-        print("digraph {")
-        for name, entry in result.items():
-            print(name)
-            for used_by in entry.get("used", ()):
-                if used_by in result:
-                    print(used_by, "->", name)
-        print("}")
-    else:
-        raise ValueError("Unknown format %r" % (args.format))
diff --git a/scripts-dev/hash_history.py b/scripts-dev/hash_history.py
deleted file mode 100644
index 8d6c3d24db..0000000000
--- a/scripts-dev/hash_history.py
+++ /dev/null
@@ -1,81 +0,0 @@
-import sqlite3
-import sys
-
-from unpaddedbase64 import decode_base64, encode_base64
-
-from synapse.crypto.event_signing import (
-    add_event_pdu_content_hash,
-    compute_pdu_event_reference_hash,
-)
-from synapse.federation.units import Pdu
-from synapse.storage._base import SQLBaseStore
-from synapse.storage.pdu import PduStore
-from synapse.storage.signatures import SignatureStore
-
-
-class Store:
-    _get_pdu_tuples = PduStore.__dict__["_get_pdu_tuples"]
-    _get_pdu_content_hashes_txn = SignatureStore.__dict__["_get_pdu_content_hashes_txn"]
-    _get_prev_pdu_hashes_txn = SignatureStore.__dict__["_get_prev_pdu_hashes_txn"]
-    _get_pdu_origin_signatures_txn = SignatureStore.__dict__[
-        "_get_pdu_origin_signatures_txn"
-    ]
-    _store_pdu_content_hash_txn = SignatureStore.__dict__["_store_pdu_content_hash_txn"]
-    _store_pdu_reference_hash_txn = SignatureStore.__dict__[
-        "_store_pdu_reference_hash_txn"
-    ]
-    _store_prev_pdu_hash_txn = SignatureStore.__dict__["_store_prev_pdu_hash_txn"]
-    simple_insert_txn = SQLBaseStore.__dict__["simple_insert_txn"]
-
-
-store = Store()
-
-
-def select_pdus(cursor):
-    cursor.execute("SELECT pdu_id, origin FROM pdus ORDER BY depth ASC")
-
-    ids = cursor.fetchall()
-
-    pdu_tuples = store._get_pdu_tuples(cursor, ids)
-
-    pdus = [Pdu.from_pdu_tuple(p) for p in pdu_tuples]
-
-    reference_hashes = {}
-
-    for pdu in pdus:
-        try:
-            if pdu.prev_pdus:
-                print("PROCESS", pdu.pdu_id, pdu.origin, pdu.prev_pdus)
-                for pdu_id, origin, hashes in pdu.prev_pdus:
-                    ref_alg, ref_hsh = reference_hashes[(pdu_id, origin)]
-                    hashes[ref_alg] = encode_base64(ref_hsh)
-                    store._store_prev_pdu_hash_txn(
-                        cursor, pdu.pdu_id, pdu.origin, pdu_id, origin, ref_alg, ref_hsh
-                    )
-                print("SUCCESS", pdu.pdu_id, pdu.origin, pdu.prev_pdus)
-            pdu = add_event_pdu_content_hash(pdu)
-            ref_alg, ref_hsh = compute_pdu_event_reference_hash(pdu)
-            reference_hashes[(pdu.pdu_id, pdu.origin)] = (ref_alg, ref_hsh)
-            store._store_pdu_reference_hash_txn(
-                cursor, pdu.pdu_id, pdu.origin, ref_alg, ref_hsh
-            )
-
-            for alg, hsh_base64 in pdu.hashes.items():
-                print(alg, hsh_base64)
-                store._store_pdu_content_hash_txn(
-                    cursor, pdu.pdu_id, pdu.origin, alg, decode_base64(hsh_base64)
-                )
-
-        except Exception:
-            print("FAILED_", pdu.pdu_id, pdu.origin, pdu.prev_pdus)
-
-
-def main():
-    conn = sqlite3.connect(sys.argv[1])
-    cursor = conn.cursor()
-    select_pdus(cursor)
-    conn.commit()
-
-
-if __name__ == "__main__":
-    main()
diff --git a/scripts-dev/list_url_patterns.py b/scripts-dev/list_url_patterns.py
deleted file mode 100755
index e85420dea8..0000000000
--- a/scripts-dev/list_url_patterns.py
+++ /dev/null
@@ -1,60 +0,0 @@
-#! /usr/bin/python
-
-import argparse
-import ast
-import os
-import sys
-
-import yaml
-
-PATTERNS_V1 = []
-PATTERNS_V2 = []
-
-RESULT = {"v1": PATTERNS_V1, "v2": PATTERNS_V2}
-
-
-class CallVisitor(ast.NodeVisitor):
-    def visit_Call(self, node):
-        if isinstance(node.func, ast.Name):
-            name = node.func.id
-        else:
-            return
-
-        if name == "client_patterns":
-            PATTERNS_V2.append(node.args[0].s)
-
-
-def find_patterns_in_code(input_code):
-    input_ast = ast.parse(input_code)
-    visitor = CallVisitor()
-    visitor.visit(input_ast)
-
-
-def find_patterns_in_file(filepath):
-    with open(filepath) as f:
-        find_patterns_in_code(f.read())
-
-
-parser = argparse.ArgumentParser(description="Find url patterns.")
-
-parser.add_argument(
-    "directories",
-    nargs="+",
-    metavar="DIR",
-    help="Directories to search for definitions",
-)
-
-args = parser.parse_args()
-
-
-for directory in args.directories:
-    for root, _, files in os.walk(directory):
-        for filename in files:
-            if filename.endswith(".py"):
-                filepath = os.path.join(root, filename)
-                find_patterns_in_file(filepath)
-
-PATTERNS_V1.sort()
-PATTERNS_V2.sort()
-
-yaml.dump(RESULT, sys.stdout, default_flow_style=False)
diff --git a/scripts-dev/tail-synapse.py b/scripts-dev/tail-synapse.py
deleted file mode 100644
index 44e3a6dbf1..0000000000
--- a/scripts-dev/tail-synapse.py
+++ /dev/null
@@ -1,67 +0,0 @@
-import collections
-import json
-import sys
-import time
-
-import requests
-
-Entry = collections.namedtuple("Entry", "name position rows")
-
-ROW_TYPES = {}
-
-
-def row_type_for_columns(name, column_names):
-    column_names = tuple(column_names)
-    row_type = ROW_TYPES.get((name, column_names))
-    if row_type is None:
-        row_type = collections.namedtuple(name, column_names)
-        ROW_TYPES[(name, column_names)] = row_type
-    return row_type
-
-
-def parse_response(content):
-    streams = json.loads(content)
-    result = {}
-    for name, value in streams.items():
-        row_type = row_type_for_columns(name, value["field_names"])
-        position = value["position"]
-        rows = [row_type(*row) for row in value["rows"]]
-        result[name] = Entry(name, position, rows)
-    return result
-
-
-def replicate(server, streams):
-    return parse_response(
-        requests.get(
-            server + "/_synapse/replication", verify=False, params=streams
-        ).content
-    )
-
-
-def main():
-    server = sys.argv[1]
-
-    streams = None
-    while not streams:
-        try:
-            streams = {
-                row.name: row.position
-                for row in replicate(server, {"streams": "-1"})["streams"].rows
-            }
-        except requests.exceptions.ConnectionError:
-            time.sleep(0.1)
-
-    while True:
-        try:
-            results = replicate(server, streams)
-        except Exception:
-            sys.stdout.write("connection_lost(" + repr(streams) + ")\n")
-            break
-        for update in results.values():
-            for row in update.rows:
-                sys.stdout.write(repr(row) + "\n")
-            streams[update.name] = update.position
-
-
-if __name__ == "__main__":
-    main()
diff --git a/scripts-dev/test_postgresql.sh b/scripts-dev/test_postgresql.sh
deleted file mode 100755
index 43cfa256e4..0000000000
--- a/scripts-dev/test_postgresql.sh
+++ /dev/null
@@ -1,19 +0,0 @@
-#!/usr/bin/env bash
-
-# This script builds the Docker image to run the PostgreSQL tests, and then runs
-# the tests. It uses a dedicated tox environment so that we don't have to
-# rebuild it each time.
-
-# Command line arguments to this script are forwarded to "tox" and then to "trial".
-
-set -e
-
-# Build, and tag
-docker build docker/ \
-  --build-arg "UID=$(id -u)" \
-  --build-arg "GID=$(id -g)" \
-  -f docker/Dockerfile-pgtests \
-  -t synapsepgtests
-
-# Run, mounting the current directory into /src
-docker run --rm -it -v "$(pwd):/src" -v synapse-pg-test-tox:/tox synapsepgtests "$@"