diff --git a/scripts-dev/check_auth.py b/scripts-dev/check_auth.py
index 4fa8792a5f..b3d11f49ec 100644
--- a/scripts-dev/check_auth.py
+++ b/scripts-dev/check_auth.py
@@ -1,21 +1,20 @@
-from synapse.events import FrozenEvent
-from synapse.api.auth import Auth
-
-from mock import Mock
+from __future__ import print_function
import argparse
import itertools
import json
import sys
+from mock import Mock
+
+from synapse.api.auth import Auth
+from synapse.events import FrozenEvent
+
def check_auth(auth, auth_chain, events):
auth_chain.sort(key=lambda e: e.depth)
- auth_map = {
- e.event_id: e
- for e in auth_chain
- }
+ auth_map = {e.event_id: e for e in auth_chain}
create_events = {}
for e in auth_chain:
@@ -25,31 +24,26 @@ def check_auth(auth, auth_chain, events):
for e in itertools.chain(auth_chain, events):
auth_events_list = [auth_map[i] for i, _ in e.auth_events]
- auth_events = {
- (e.type, e.state_key): e
- for e in auth_events_list
- }
+ auth_events = {(e.type, e.state_key): e for e in auth_events_list}
auth_events[("m.room.create", "")] = create_events[e.room_id]
try:
auth.check(e, auth_events=auth_events)
except Exception as ex:
- print "Failed:", e.event_id, e.type, e.state_key
- print "Auth_events:", auth_events
- print ex
- print json.dumps(e.get_dict(), sort_keys=True, indent=4)
+ print("Failed:", e.event_id, e.type, e.state_key)
+ print("Auth_events:", auth_events)
+ print(ex)
+ print(json.dumps(e.get_dict(), sort_keys=True, indent=4))
# raise
- print "Success:", e.event_id, e.type, e.state_key
+ print("Success:", e.event_id, e.type, e.state_key)
+
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument(
- 'json',
- nargs='?',
- type=argparse.FileType('r'),
- default=sys.stdin,
+ 'json', nargs='?', type=argparse.FileType('r'), default=sys.stdin
)
args = parser.parse_args()
diff --git a/scripts-dev/check_event_hash.py b/scripts-dev/check_event_hash.py
index 7ccae34d48..8535f99697 100644
--- a/scripts-dev/check_event_hash.py
+++ b/scripts-dev/check_event_hash.py
@@ -1,10 +1,15 @@
-from synapse.crypto.event_signing import *
-from unpaddedbase64 import encode_base64
-
import argparse
import hashlib
-import sys
import json
+import logging
+import sys
+
+from unpaddedbase64 import encode_base64
+
+from synapse.crypto.event_signing import (
+ check_event_content_hash,
+ compute_event_reference_hash,
+)
class dictobj(dict):
@@ -24,27 +29,26 @@ class dictobj(dict):
def main():
parser = argparse.ArgumentParser()
- parser.add_argument("input_json", nargs="?", type=argparse.FileType('r'),
- default=sys.stdin)
+ parser.add_argument(
+ "input_json", nargs="?", type=argparse.FileType('r'), default=sys.stdin
+ )
args = parser.parse_args()
logging.basicConfig()
event_json = dictobj(json.load(args.input_json))
- algorithms = {
- "sha256": hashlib.sha256,
- }
+ algorithms = {"sha256": hashlib.sha256}
for alg_name in event_json.hashes:
if check_event_content_hash(event_json, algorithms[alg_name]):
- print "PASS content hash %s" % (alg_name,)
+ print("PASS content hash %s" % (alg_name,))
else:
- print "FAIL content hash %s" % (alg_name,)
+ print("FAIL content hash %s" % (alg_name,))
for algorithm in algorithms.values():
name, h_bytes = compute_event_reference_hash(event_json, algorithm)
- print "Reference hash %s: %s" % (name, encode_base64(h_bytes))
+ print("Reference hash %s: %s" % (name, encode_base64(h_bytes)))
-if __name__=="__main__":
- main()
+if __name__ == "__main__":
+ main()
diff --git a/scripts-dev/check_signature.py b/scripts-dev/check_signature.py
index 079577908a..612f17ca7f 100644
--- a/scripts-dev/check_signature.py
+++ b/scripts-dev/check_signature.py
@@ -1,15 +1,15 @@
-from signedjson.sign import verify_signed_json
-from signedjson.key import decode_verify_key_bytes, write_signing_keys
-from unpaddedbase64 import decode_base64
-
-import urllib2
+import argparse
import json
+import logging
import sys
+import urllib2
+
import dns.resolver
-import pprint
-import argparse
-import logging
+from signedjson.key import decode_verify_key_bytes, write_signing_keys
+from signedjson.sign import verify_signed_json
+from unpaddedbase64 import decode_base64
+
def get_targets(server_name):
if ":" in server_name:
@@ -23,6 +23,7 @@ def get_targets(server_name):
except dns.resolver.NXDOMAIN:
yield (server_name, 8448)
+
def get_server_keys(server_name, target, port):
url = "https://%s:%i/_matrix/key/v1" % (target, port)
keys = json.load(urllib2.urlopen(url))
@@ -33,12 +34,14 @@ def get_server_keys(server_name, target, port):
verify_keys[key_id] = verify_key
return verify_keys
+
def main():
parser = argparse.ArgumentParser()
parser.add_argument("signature_name")
- parser.add_argument("input_json", nargs="?", type=argparse.FileType('r'),
- default=sys.stdin)
+ parser.add_argument(
+ "input_json", nargs="?", type=argparse.FileType('r'), default=sys.stdin
+ )
args = parser.parse_args()
logging.basicConfig()
@@ -48,24 +51,23 @@ def main():
for target, port in get_targets(server_name):
try:
keys = get_server_keys(server_name, target, port)
- print "Using keys from https://%s:%s/_matrix/key/v1" % (target, port)
+ print("Using keys from https://%s:%s/_matrix/key/v1" % (target, port))
write_signing_keys(sys.stdout, keys.values())
break
- except:
+ except Exception:
logging.exception("Error talking to %s:%s", target, port)
json_to_check = json.load(args.input_json)
- print "Checking JSON:"
+ print("Checking JSON:")
for key_id in json_to_check["signatures"][args.signature_name]:
try:
key = keys[key_id]
verify_signed_json(json_to_check, args.signature_name, key)
- print "PASS %s" % (key_id,)
- except:
+ print("PASS %s" % (key_id,))
+ except Exception:
logging.exception("Check for key %s failed" % (key_id,))
- print "FAIL %s" % (key_id,)
+ print("FAIL %s" % (key_id,))
if __name__ == '__main__':
main()
-
diff --git a/scripts-dev/convert_server_keys.py b/scripts-dev/convert_server_keys.py
index 151551f22c..dde8596697 100644
--- a/scripts-dev/convert_server_keys.py
+++ b/scripts-dev/convert_server_keys.py
@@ -1,13 +1,21 @@
-import psycopg2
-import yaml
-import sys
+import hashlib
import json
+import sys
import time
-import hashlib
-from unpaddedbase64 import encode_base64
+
+import six
+
+import psycopg2
+import yaml
+from canonicaljson import encode_canonical_json
from signedjson.key import read_signing_keys
from signedjson.sign import sign_json
-from canonicaljson import encode_canonical_json
+from unpaddedbase64 import encode_base64
+
+if six.PY2:
+ db_type = six.moves.builtins.buffer
+else:
+ db_type = memoryview
def select_v1_keys(connection):
@@ -39,7 +47,9 @@ def select_v2_json(connection):
cursor.close()
results = {}
for server_name, key_id, key_json in rows:
- results.setdefault(server_name, {})[key_id] = json.loads(str(key_json).decode("utf-8"))
+ results.setdefault(server_name, {})[key_id] = json.loads(
+ str(key_json).decode("utf-8")
+ )
return results
@@ -47,10 +57,7 @@ def convert_v1_to_v2(server_name, valid_until, keys, certificate):
return {
"old_verify_keys": {},
"server_name": server_name,
- "verify_keys": {
- key_id: {"key": key}
- for key_id, key in keys.items()
- },
+ "verify_keys": {key_id: {"key": key} for key_id, key in keys.items()},
"valid_until_ts": valid_until,
"tls_fingerprints": [fingerprint(certificate)],
}
@@ -65,7 +72,7 @@ def rows_v2(server, json):
valid_until = json["valid_until_ts"]
key_json = encode_canonical_json(json)
for key_id in json["verify_keys"]:
- yield (server, key_id, "-", valid_until, valid_until, buffer(key_json))
+ yield (server, key_id, "-", valid_until, valid_until, db_type(key_json))
def main():
@@ -87,7 +94,7 @@ def main():
result = {}
for server in keys:
- if not server in json:
+ if server not in json:
v2_json = convert_v1_to_v2(
server, valid_until, keys[server], certificates[server]
)
@@ -96,10 +103,7 @@ def main():
yaml.safe_dump(result, sys.stdout, default_flow_style=False)
- rows = list(
- row for server, json in result.items()
- for row in rows_v2(server, json)
- )
+ rows = list(row for server, json in result.items() for row in rows_v2(server, json))
cursor = connection.cursor()
cursor.executemany(
@@ -107,7 +111,7 @@ def main():
" server_name, key_id, from_server,"
" ts_added_ms, ts_valid_until_ms, key_json"
") VALUES (%s, %s, %s, %s, %s, %s)",
- rows
+ rows,
)
connection.commit()
diff --git a/scripts-dev/definitions.py b/scripts-dev/definitions.py
index 47dac7772d..1deb0fe2b7 100755
--- a/scripts-dev/definitions.py
+++ b/scripts-dev/definitions.py
@@ -1,8 +1,16 @@
#! /usr/bin/python
+from __future__ import print_function
+
+import argparse
import ast
+import os
+import re
+import sys
+
import yaml
+
class DefinitionVisitor(ast.NodeVisitor):
def __init__(self):
super(DefinitionVisitor, self).__init__()
@@ -42,15 +50,18 @@ def non_empty(defs):
functions = {name: non_empty(f) for name, f in defs['def'].items()}
classes = {name: non_empty(f) for name, f in defs['class'].items()}
result = {}
- if functions: result['def'] = functions
- if classes: result['class'] = classes
+ if functions:
+ result['def'] = functions
+ if classes:
+ result['class'] = classes
names = defs['names']
uses = []
for name in names.get('Load', ()):
if name not in names.get('Param', ()) and name not in names.get('Store', ()):
uses.append(name)
uses.extend(defs['attrs'])
- if uses: result['uses'] = uses
+ if uses:
+ result['uses'] = uses
result['names'] = names
result['attrs'] = defs['attrs']
return result
@@ -95,7 +106,6 @@ def used_names(prefix, item, defs, names):
if __name__ == '__main__':
- import sys, os, argparse, re
parser = argparse.ArgumentParser(description='Find definitions.')
parser.add_argument(
@@ -105,24 +115,28 @@ if __name__ == '__main__':
"--ignore", action="append", metavar="REGEXP", help="Ignore a pattern"
)
parser.add_argument(
- "--pattern", action="append", metavar="REGEXP",
- help="Search for a pattern"
+ "--pattern", action="append", metavar="REGEXP", help="Search for a pattern"
)
parser.add_argument(
- "directories", nargs='+', metavar="DIR",
- help="Directories to search for definitions"
+ "directories",
+ nargs='+',
+ metavar="DIR",
+ help="Directories to search for definitions",
)
parser.add_argument(
- "--referrers", default=0, type=int,
- help="Include referrers up to the given depth"
+ "--referrers",
+ default=0,
+ type=int,
+ help="Include referrers up to the given depth",
)
parser.add_argument(
- "--referred", default=0, type=int,
- help="Include referred down to the given depth"
+ "--referred",
+ default=0,
+ type=int,
+ help="Include referred down to the given depth",
)
parser.add_argument(
- "--format", default="yaml",
- help="Output format, one of 'yaml' or 'dot'"
+ "--format", default="yaml", help="Output format, one of 'yaml' or 'dot'"
)
args = parser.parse_args()
@@ -162,7 +176,7 @@ if __name__ == '__main__':
for used_by in entry.get("used", ()):
referrers.add(used_by)
for name, definition in names.items():
- if not name in referrers:
+ if name not in referrers:
continue
if ignore and any(pattern.match(name) for pattern in ignore):
continue
@@ -176,7 +190,7 @@ if __name__ == '__main__':
for uses in entry.get("uses", ()):
referred.add(uses)
for name, definition in names.items():
- if not name in referred:
+ if name not in referred:
continue
if ignore and any(pattern.match(name) for pattern in ignore):
continue
@@ -185,12 +199,12 @@ if __name__ == '__main__':
if args.format == 'yaml':
yaml.dump(result, sys.stdout, default_flow_style=False)
elif args.format == 'dot':
- print "digraph {"
+ print("digraph {")
for name, entry in result.items():
- print name
+ print(name)
for used_by in entry.get("used", ()):
if used_by in result:
- print used_by, "->", name
- print "}"
+ print(used_by, "->", name)
+ print("}")
else:
raise ValueError("Unknown format %r" % (args.format))
diff --git a/scripts-dev/dump_macaroon.py b/scripts-dev/dump_macaroon.py
index fcc5568835..22b30fa78e 100755
--- a/scripts-dev/dump_macaroon.py
+++ b/scripts-dev/dump_macaroon.py
@@ -1,8 +1,11 @@
#!/usr/bin/env python2
-import pymacaroons
+from __future__ import print_function
+
import sys
+import pymacaroons
+
if len(sys.argv) == 1:
sys.stderr.write("usage: %s macaroon [key]\n" % (sys.argv[0],))
sys.exit(1)
@@ -11,14 +14,14 @@ macaroon_string = sys.argv[1]
key = sys.argv[2] if len(sys.argv) > 2 else None
macaroon = pymacaroons.Macaroon.deserialize(macaroon_string)
-print macaroon.inspect()
+print(macaroon.inspect())
-print ""
+print("")
verifier = pymacaroons.Verifier()
verifier.satisfy_general(lambda c: True)
try:
verifier.verify(macaroon, key)
- print "Signature is correct"
+ print("Signature is correct")
except Exception as e:
- print str(e)
+ print(str(e))
diff --git a/scripts-dev/federation_client.py b/scripts-dev/federation_client.py
index d2acc7654d..2566ce7cef 100755
--- a/scripts-dev/federation_client.py
+++ b/scripts-dev/federation_client.py
@@ -18,21 +18,21 @@
from __future__ import print_function
import argparse
+import base64
+import json
+import sys
from urlparse import urlparse, urlunparse
import nacl.signing
-import json
-import base64
import requests
-import sys
-
-from requests.adapters import HTTPAdapter
import srvlookup
import yaml
+from requests.adapters import HTTPAdapter
# uncomment the following to enable debug logging of http requests
-#from httplib import HTTPConnection
-#HTTPConnection.debuglevel = 1
+# from httplib import HTTPConnection
+# HTTPConnection.debuglevel = 1
+
def encode_base64(input_bytes):
"""Encode bytes as a base64 string without any padding."""
@@ -58,15 +58,15 @@ def decode_base64(input_string):
def encode_canonical_json(value):
return json.dumps(
- value,
- # Encode code-points outside of ASCII as UTF-8 rather than \u escapes
- ensure_ascii=False,
- # Remove unecessary white space.
- separators=(',',':'),
- # Sort the keys of dictionaries.
- sort_keys=True,
- # Encode the resulting unicode as UTF-8 bytes.
- ).encode("UTF-8")
+ value,
+ # Encode code-points outside of ASCII as UTF-8 rather than \u escapes
+ ensure_ascii=False,
+ # Remove unnecessary white space.
+ separators=(',', ':'),
+ # Sort the keys of dictionaries.
+ sort_keys=True,
+ # Encode the resulting unicode as UTF-8 bytes.
+ ).encode("UTF-8")
def sign_json(json_object, signing_key, signing_name):
@@ -88,6 +88,7 @@ def sign_json(json_object, signing_key, signing_name):
NACL_ED25519 = "ed25519"
+
def decode_signing_key_base64(algorithm, version, key_base64):
"""Decode a base64 encoded signing key
Args:
@@ -143,14 +144,12 @@ def request_json(method, origin_name, origin_key, destination, path, content):
authorization_headers = []
for key, sig in signed_json["signatures"][origin_name].items():
- header = "X-Matrix origin=%s,key=\"%s\",sig=\"%s\"" % (
- origin_name, key, sig,
- )
+ header = "X-Matrix origin=%s,key=\"%s\",sig=\"%s\"" % (origin_name, key, sig)
authorization_headers.append(bytes(header))
- print ("Authorization: %s" % header, file=sys.stderr)
+ print("Authorization: %s" % header, file=sys.stderr)
dest = "matrix://%s%s" % (destination, path)
- print ("Requesting %s" % dest, file=sys.stderr)
+ print("Requesting %s" % dest, file=sys.stderr)
s = requests.Session()
s.mount("matrix://", MatrixConnectionAdapter())
@@ -158,10 +157,7 @@ def request_json(method, origin_name, origin_key, destination, path, content):
result = s.request(
method=method,
url=dest,
- headers={
- "Host": destination,
- "Authorization": authorization_headers[0]
- },
+ headers={"Host": destination, "Authorization": authorization_headers[0]},
verify=False,
data=content,
)
@@ -171,50 +167,50 @@ def request_json(method, origin_name, origin_key, destination, path, content):
def main():
parser = argparse.ArgumentParser(
- description=
- "Signs and sends a federation request to a matrix homeserver",
+ description="Signs and sends a federation request to a matrix homeserver"
)
parser.add_argument(
- "-N", "--server-name",
+ "-N",
+ "--server-name",
help="Name to give as the local homeserver. If unspecified, will be "
- "read from the config file.",
+ "read from the config file.",
)
parser.add_argument(
- "-k", "--signing-key-path",
+ "-k",
+ "--signing-key-path",
help="Path to the file containing the private ed25519 key to sign the "
- "request with.",
+ "request with.",
)
parser.add_argument(
- "-c", "--config",
+ "-c",
+ "--config",
default="homeserver.yaml",
help="Path to server config file. Ignored if --server-name and "
- "--signing-key-path are both given.",
+ "--signing-key-path are both given.",
)
parser.add_argument(
- "-d", "--destination",
+ "-d",
+ "--destination",
default="matrix.org",
help="name of the remote homeserver. We will do SRV lookups and "
- "connect appropriately.",
+ "connect appropriately.",
)
parser.add_argument(
- "-X", "--method",
+ "-X",
+ "--method",
help="HTTP method to use for the request. Defaults to GET if --data is"
- "unspecified, POST if it is."
+ " unspecified, POST if it is.",
)
- parser.add_argument(
- "--body",
- help="Data to send as the body of the HTTP request"
- )
+ parser.add_argument("--body", help="Data to send as the body of the HTTP request")
parser.add_argument(
- "path",
- help="request path. We will add '/_matrix/federation/v1/' to this."
+ "path", help="request path. We will add '/_matrix/federation/v1/' to this."
)
args = parser.parse_args()
@@ -227,13 +223,15 @@ def main():
result = request_json(
args.method,
- args.server_name, key, args.destination,
+ args.server_name,
+ key,
+ args.destination,
"/_matrix/federation/v1/" + args.path,
content=args.body,
)
json.dump(result, sys.stdout)
- print ("")
+ print("")
def read_args_from_config(args):
@@ -253,7 +251,7 @@ class MatrixConnectionAdapter(HTTPAdapter):
return s, 8448
if ":" in s:
- out = s.rsplit(":",1)
+ out = s.rsplit(":", 1)
try:
port = int(out[1])
except ValueError:
@@ -263,7 +261,7 @@ class MatrixConnectionAdapter(HTTPAdapter):
try:
srv = srvlookup.lookup("matrix", "tcp", s)[0]
return srv.host, srv.port
- except:
+ except Exception:
return s, 8448
def get_connection(self, url, proxies=None):
@@ -272,10 +270,9 @@ class MatrixConnectionAdapter(HTTPAdapter):
(host, port) = self.lookup(parsed.netloc)
netloc = "%s:%d" % (host, port)
print("Connecting to %s" % (netloc,), file=sys.stderr)
- url = urlunparse((
- "https", netloc, parsed.path, parsed.params, parsed.query,
- parsed.fragment,
- ))
+ url = urlunparse(
+ ("https", netloc, parsed.path, parsed.params, parsed.query, parsed.fragment)
+ )
return super(MatrixConnectionAdapter, self).get_connection(url, proxies)
diff --git a/scripts-dev/hash_history.py b/scripts-dev/hash_history.py
index 616d6a10e7..514d80fa60 100644
--- a/scripts-dev/hash_history.py
+++ b/scripts-dev/hash_history.py
@@ -1,23 +1,31 @@
-from synapse.storage.pdu import PduStore
-from synapse.storage.signatures import SignatureStore
-from synapse.storage._base import SQLBaseStore
-from synapse.federation.units import Pdu
-from synapse.crypto.event_signing import (
- add_event_pdu_content_hash, compute_pdu_event_reference_hash
-)
-from synapse.api.events.utils import prune_pdu
-from unpaddedbase64 import encode_base64, decode_base64
-from canonicaljson import encode_canonical_json
+from __future__ import print_function
+
import sqlite3
import sys
+from unpaddedbase64 import decode_base64, encode_base64
+
+from synapse.crypto.event_signing import (
+ add_event_pdu_content_hash,
+ compute_pdu_event_reference_hash,
+)
+from synapse.federation.units import Pdu
+from synapse.storage._base import SQLBaseStore
+from synapse.storage.pdu import PduStore
+from synapse.storage.signatures import SignatureStore
+
+
class Store(object):
_get_pdu_tuples = PduStore.__dict__["_get_pdu_tuples"]
_get_pdu_content_hashes_txn = SignatureStore.__dict__["_get_pdu_content_hashes_txn"]
_get_prev_pdu_hashes_txn = SignatureStore.__dict__["_get_prev_pdu_hashes_txn"]
- _get_pdu_origin_signatures_txn = SignatureStore.__dict__["_get_pdu_origin_signatures_txn"]
+ _get_pdu_origin_signatures_txn = SignatureStore.__dict__[
+ "_get_pdu_origin_signatures_txn"
+ ]
_store_pdu_content_hash_txn = SignatureStore.__dict__["_store_pdu_content_hash_txn"]
- _store_pdu_reference_hash_txn = SignatureStore.__dict__["_store_pdu_reference_hash_txn"]
+ _store_pdu_reference_hash_txn = SignatureStore.__dict__[
+ "_store_pdu_reference_hash_txn"
+ ]
_store_prev_pdu_hash_txn = SignatureStore.__dict__["_store_prev_pdu_hash_txn"]
_simple_insert_txn = SQLBaseStore.__dict__["_simple_insert_txn"]
@@ -26,9 +34,7 @@ store = Store()
def select_pdus(cursor):
- cursor.execute(
- "SELECT pdu_id, origin FROM pdus ORDER BY depth ASC"
- )
+ cursor.execute("SELECT pdu_id, origin FROM pdus ORDER BY depth ASC")
ids = cursor.fetchall()
@@ -41,23 +47,30 @@ def select_pdus(cursor):
for pdu in pdus:
try:
if pdu.prev_pdus:
- print "PROCESS", pdu.pdu_id, pdu.origin, pdu.prev_pdus
+ print("PROCESS", pdu.pdu_id, pdu.origin, pdu.prev_pdus)
for pdu_id, origin, hashes in pdu.prev_pdus:
ref_alg, ref_hsh = reference_hashes[(pdu_id, origin)]
hashes[ref_alg] = encode_base64(ref_hsh)
- store._store_prev_pdu_hash_txn(cursor, pdu.pdu_id, pdu.origin, pdu_id, origin, ref_alg, ref_hsh)
- print "SUCCESS", pdu.pdu_id, pdu.origin, pdu.prev_pdus
+ store._store_prev_pdu_hash_txn(
+ cursor, pdu.pdu_id, pdu.origin, pdu_id, origin, ref_alg, ref_hsh
+ )
+ print("SUCCESS", pdu.pdu_id, pdu.origin, pdu.prev_pdus)
pdu = add_event_pdu_content_hash(pdu)
ref_alg, ref_hsh = compute_pdu_event_reference_hash(pdu)
reference_hashes[(pdu.pdu_id, pdu.origin)] = (ref_alg, ref_hsh)
- store._store_pdu_reference_hash_txn(cursor, pdu.pdu_id, pdu.origin, ref_alg, ref_hsh)
+ store._store_pdu_reference_hash_txn(
+ cursor, pdu.pdu_id, pdu.origin, ref_alg, ref_hsh
+ )
for alg, hsh_base64 in pdu.hashes.items():
- print alg, hsh_base64
- store._store_pdu_content_hash_txn(cursor, pdu.pdu_id, pdu.origin, alg, decode_base64(hsh_base64))
+ print(alg, hsh_base64)
+ store._store_pdu_content_hash_txn(
+ cursor, pdu.pdu_id, pdu.origin, alg, decode_base64(hsh_base64)
+ )
+
+ except Exception:
+ print("FAILED_", pdu.pdu_id, pdu.origin, pdu.prev_pdus)
- except:
- print "FAILED_", pdu.pdu_id, pdu.origin, pdu.prev_pdus
def main():
conn = sqlite3.connect(sys.argv[1])
@@ -65,5 +78,6 @@ def main():
select_pdus(cursor)
conn.commit()
-if __name__=='__main__':
+
+if __name__ == '__main__':
main()
diff --git a/scripts-dev/list_url_patterns.py b/scripts-dev/list_url_patterns.py
index 58d40c4ff4..da027be26e 100755
--- a/scripts-dev/list_url_patterns.py
+++ b/scripts-dev/list_url_patterns.py
@@ -1,18 +1,17 @@
#! /usr/bin/python
-import ast
import argparse
+import ast
import os
import sys
+
import yaml
PATTERNS_V1 = []
PATTERNS_V2 = []
-RESULT = {
- "v1": PATTERNS_V1,
- "v2": PATTERNS_V2,
-}
+RESULT = {"v1": PATTERNS_V1, "v2": PATTERNS_V2}
+
class CallVisitor(ast.NodeVisitor):
def visit_Call(self, node):
@@ -21,7 +20,6 @@ class CallVisitor(ast.NodeVisitor):
else:
return
-
if name == "client_path_patterns":
PATTERNS_V1.append(node.args[0].s)
elif name == "client_v2_patterns":
@@ -42,8 +40,10 @@ def find_patterns_in_file(filepath):
parser = argparse.ArgumentParser(description='Find url patterns.')
parser.add_argument(
- "directories", nargs='+', metavar="DIR",
- help="Directories to search for definitions"
+ "directories",
+ nargs='+',
+ metavar="DIR",
+ help="Directories to search for definitions",
)
args = parser.parse_args()
diff --git a/scripts-dev/tail-synapse.py b/scripts-dev/tail-synapse.py
index 18be711e92..7c9985d9f0 100644
--- a/scripts-dev/tail-synapse.py
+++ b/scripts-dev/tail-synapse.py
@@ -1,8 +1,9 @@
-import requests
import collections
+import json
import sys
import time
-import json
+
+import requests
Entry = collections.namedtuple("Entry", "name position rows")
@@ -30,11 +31,11 @@ def parse_response(content):
def replicate(server, streams):
- return parse_response(requests.get(
- server + "/_synapse/replication",
- verify=False,
- params=streams
- ).content)
+ return parse_response(
+ requests.get(
+ server + "/_synapse/replication", verify=False, params=streams
+ ).content
+ )
def main():
@@ -45,16 +46,16 @@ def main():
try:
streams = {
row.name: row.position
- for row in replicate(server, {"streams":"-1"})["streams"].rows
+ for row in replicate(server, {"streams": "-1"})["streams"].rows
}
- except requests.exceptions.ConnectionError as e:
+ except requests.exceptions.ConnectionError:
time.sleep(0.1)
while True:
try:
results = replicate(server, streams)
- except:
- sys.stdout.write("connection_lost("+ repr(streams) + ")\n")
+ except Exception:
+ sys.stdout.write("connection_lost(" + repr(streams) + ")\n")
break
for update in results.values():
for row in update.rows:
@@ -62,6 +63,5 @@ def main():
streams[update.name] = update.position
-
-if __name__=='__main__':
+if __name__ == '__main__':
main()
|