author    Erik Johnston <erik@matrix.org>    2015-05-26 16:03:32 +0100
committer Erik Johnston <erik@matrix.org>    2015-05-26 16:03:32 +0100
commit    6d1dea337b8ae64ddfe4c1627014d09963388b0d (patch)
tree      c5cefd5b0334185780ffa8d391cb2d2f70c003af /scripts-dev/convert_server_keys.py
parent    Changelog (diff)
parent    changelog (diff)
download  synapse-6d1dea337b8ae64ddfe4c1627014d09963388b0d.tar.xz
Merge branch 'release-v0.9.1' of github.com:matrix-org/synapse v0.9.1
Diffstat (limited to 'scripts-dev/convert_server_keys.py')
-rw-r--r--  scripts-dev/convert_server_keys.py | 116
1 file changed, 116 insertions(+), 0 deletions(-)
diff --git a/scripts-dev/convert_server_keys.py b/scripts-dev/convert_server_keys.py
new file mode 100644
index 0000000000..a1ee39059c
--- /dev/null
+++ b/scripts-dev/convert_server_keys.py
@@ -0,0 +1,116 @@
+import psycopg2
+import yaml
+import sys
+import json
+import time
+import hashlib
+from syutil.base64util import encode_base64
+from syutil.crypto.signing_key import read_signing_keys
+from syutil.crypto.jsonsign import sign_json
+from syutil.jsonutil import encode_canonical_json
+
+
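+# Read the v1 verify keys out of the old server_signature_keys table,
+# keyed by server name and then key id, with the key bytes base64-encoded.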
+def select_v1_keys(connection):
+    cursor = connection.cursor()
+    cursor.execute("SELECT server_name, key_id, verify_key FROM server_signature_keys")
+    rows = cursor.fetchall()
+    cursor.close()
+    results = {}
+    for server_name, key_id, verify_key in rows:
+        results.setdefault(server_name, {})[key_id] = encode_base64(verify_key)
+    return results
+
+
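+# Read each server's stored TLS certificate from server_tls_certificates.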
+def select_v1_certs(connection):
+    cursor = connection.cursor()
+    cursor.execute("SELECT server_name, tls_certificate FROM server_tls_certificates")
+    rows = cursor.fetchall()
+    cursor.close()
+    results = {}
+    for server_name, tls_certificate in rows:
+        results[server_name] = tls_certificate
+    return results
+
+
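+# Read any v2 key JSON already stored in server_keys_json, so that servers
+# which already have v2 entries can be skipped in main().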
+def select_v2_json(connection):
+    cursor = connection.cursor()
+    cursor.execute("SELECT server_name, key_id, key_json FROM server_keys_json")
+    rows = cursor.fetchall()
+    cursor.close()
+    results = {}
+    for server_name, key_id, key_json in rows:
+        results.setdefault(server_name, {})[key_id] = json.loads(str(key_json).decode("utf-8"))
+    return results
+
+
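+# Build an unsigned v2 key response for a server from its v1 verify keys and
+# TLS certificate; the caller is responsible for signing the result.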
+def convert_v1_to_v2(server_name, valid_until, keys, certificate):
+    return {
+        "old_verify_keys": {},
+        "server_name": server_name,
+        "verify_keys": {
+            key_id: {"key": key}
+            for key_id, key in keys.items()
+        },
+        "valid_until_ts": valid_until,
+        "tls_fingerprints": [fingerprint(certificate)],
+    }
+
+
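+# SHA-256 fingerprint of the certificate bytes, in the form expected by the
+# tls_fingerprints field of a v2 key response.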
+def fingerprint(certificate):
+    finger = hashlib.sha256(certificate)
+    return {"sha256": encode_base64(finger.digest())}
+
+
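+# Flatten a v2 key document into one database row per verify key, in the
+# column order used by the INSERT in main(). "-" is a placeholder for the
+# from_server column, and buffer() wraps the bytes for psycopg2 (Python 2).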
+def rows_v2(server, json):
+    valid_until = json["valid_until_ts"]
+    key_json = encode_canonical_json(json)
+    for key_id in json["verify_keys"]:
+        yield (server, key_id, "-", valid_until, valid_until, buffer(key_json))
+
+
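+# Load the homeserver config, convert any servers that only have v1 keys into
+# signed v2 key JSON, dump the result as YAML on stdout and insert it into
+# server_keys_json.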
+def main():
+    config = yaml.load(open(sys.argv[1]))
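+    # valid_until_ts: the current time rounded down to a whole day, in milliseconds.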
+    valid_until = int(time.time() / (3600 * 24)) * 1000 * 3600 * 24
+
+    server_name = config["server_name"]
+    signing_key = read_signing_keys(open(config["signing_key_path"]))[0]
+
+    database = config["database"]
+    assert database["name"] == "psycopg2", "Can only convert for postgresql"
+    args = database["args"]
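+    # Drop the connection-pool sizing options from the config, since
+    # psycopg2.connect() does not accept them.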
+    args.pop("cp_max")
+    args.pop("cp_min")
+    connection = psycopg2.connect(**args)
+    keys = select_v1_keys(connection)
+    certificates = select_v1_certs(connection)
+    json = select_v2_json(connection)
+
+    result = {}
+    for server in keys:
+        if server not in json:
+            v2_json = convert_v1_to_v2(
+                server, valid_until, keys[server], certificates[server]
+            )
+            v2_json = sign_json(v2_json, server_name, signing_key)
+            result[server] = v2_json
+
+    yaml.safe_dump(result, sys.stdout, default_flow_style=False)
+
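+    # Persist the newly signed v2 keys back into the server_keys_json table.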
+    rows = list(
+        row for server, json in result.items()
+        for row in rows_v2(server, json)
+    )
+
+    cursor = connection.cursor()
+    cursor.executemany(
+        "INSERT INTO server_keys_json ("
+        " server_name, key_id, from_server,"
+        " ts_added_ms, ts_valid_until_ms, key_json"
+        ") VALUES (%s, %s, %s, %s, %s, %s)",
+        rows
+    )
+    connection.commit()
+
+
+if __name__ == '__main__':
+    main()