author    Erik Johnston <erik@matrix.org>  2015-11-17 15:45:43 +0000
committer Erik Johnston <erik@matrix.org>  2015-11-17 15:45:43 +0000
commit    d3861b44424aa6f03cc65719bb1527330157abea (patch)
tree      4377eb0dc5e221862489bdcc802e50e2f1f41cb1 /scripts-dev
parent    Merge branch 'hotfixes-v0.10.0-r2' of github.com:matrix-org/synapse (diff)
parent    Slightly more aggressive retry timers at HTTP level (diff)
download  synapse-d3861b44424aa6f03cc65719bb1527330157abea.tar.xz
Merge branch 'release-v0.11.0' of github.com:matrix-org/synapse v0.11.0
Diffstat (limited to 'scripts-dev')
-rw-r--r--  scripts-dev/check_auth.py            3
-rw-r--r--  scripts-dev/check_event_hash.py      2
-rw-r--r--  scripts-dev/check_signature.py       8
-rw-r--r--  scripts-dev/convert_server_keys.py   8
-rwxr-xr-x  scripts-dev/definitions.py         142
-rw-r--r--  scripts-dev/hash_history.py          4
6 files changed, 153 insertions, 14 deletions
diff --git a/scripts-dev/check_auth.py b/scripts-dev/check_auth.py
index b889ac7fa7..4fa8792a5f 100644
--- a/scripts-dev/check_auth.py
+++ b/scripts-dev/check_auth.py
@@ -56,10 +56,9 @@ if __name__ == '__main__':
 
     js = json.load(args.json)
 
-
     auth = Auth(Mock())
     check_auth(
         auth,
         [FrozenEvent(d) for d in js["auth_chain"]],
-        [FrozenEvent(d) for d in js["pdus"]],
+        [FrozenEvent(d) for d in js.get("pdus", [])],
     )
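
The only functional change here is that the script now falls back to an empty list when the dump has no "pdus" key, instead of failing with a KeyError. A minimal sketch of the difference (the sample dict is hypothetical):

js = {"auth_chain": []}        # hypothetical dump without a "pdus" key
pdus = js.get("pdus", [])      # js["pdus"] would raise KeyError here
print(pdus)                    # -> []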
diff --git a/scripts-dev/check_event_hash.py b/scripts-dev/check_event_hash.py
index 679afbd268..7ccae34d48 100644
--- a/scripts-dev/check_event_hash.py
+++ b/scripts-dev/check_event_hash.py
@@ -1,5 +1,5 @@
 from synapse.crypto.event_signing import *
-from syutil.base64util import encode_base64
+from unpaddedbase64 import encode_base64
 
 import argparse
 import hashlib
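
syutil's base64 helper is replaced by the standalone unpaddedbase64 package, which encodes to base64 without trailing '=' padding. A quick illustrative round trip, with a made-up payload:

from unpaddedbase64 import encode_base64, decode_base64

encoded = encode_base64(b"some event hash bytes")   # str, no '=' padding
assert decode_base64(encoded) == b"some event hash bytes"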
diff --git a/scripts-dev/check_signature.py b/scripts-dev/check_signature.py
index 59e3d603ac..079577908a 100644
--- a/scripts-dev/check_signature.py
+++ b/scripts-dev/check_signature.py
@@ -1,9 +1,7 @@
 
-from syutil.crypto.jsonsign import verify_signed_json
-from syutil.crypto.signing_key import (
-    decode_verify_key_bytes, write_signing_keys
-)
-from syutil.base64util import decode_base64
+from signedjson.sign import verify_signed_json
+from signedjson.key import decode_verify_key_bytes, write_signing_keys
+from unpaddedbase64 import decode_base64
 
 import urllib2
 import json
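
The JSON-signing helpers likewise move from syutil to the signedjson package. A minimal sketch of the verify path this script relies on, using a freshly generated key in place of a real fetched server key (the server name and key version here are hypothetical):

from signedjson.key import generate_signing_key, get_verify_key
from signedjson.sign import sign_json, verify_signed_json, SignatureVerifyException

signing_key = generate_signing_key("v1")
signed = sign_json({"old_verify_keys": {}}, "example.com", signing_key)
try:
    verify_signed_json(signed, "example.com", get_verify_key(signing_key))
except SignatureVerifyException:
    print("signature check failed")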
diff --git a/scripts-dev/convert_server_keys.py b/scripts-dev/convert_server_keys.py
index a1ee39059c..151551f22c 100644
--- a/scripts-dev/convert_server_keys.py
+++ b/scripts-dev/convert_server_keys.py
@@ -4,10 +4,10 @@ import sys
 import json
 import time
 import hashlib
-from syutil.base64util import encode_base64
-from syutil.crypto.signing_key import read_signing_keys
-from syutil.crypto.jsonsign import sign_json
-from syutil.jsonutil import encode_canonical_json
+from unpaddedbase64 import encode_base64
+from signedjson.key import read_signing_keys
+from signedjson.sign import sign_json
+from canonicaljson import encode_canonical_json
 
 
 def select_v1_keys(connection):
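
encode_canonical_json now comes from the canonicaljson package; it serialises to the sorted-key, whitespace-free form that the signing code expects, so equal dicts always produce identical bytes. A small hedged example of that property:

from canonicaljson import encode_canonical_json

# key order in the input should not matter for the canonical output
assert encode_canonical_json({"b": 1, "a": 2}) == encode_canonical_json({"a": 2, "b": 1})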
diff --git a/scripts-dev/definitions.py b/scripts-dev/definitions.py
new file mode 100755
index 0000000000..f0d0cd8a3f
--- /dev/null
+++ b/scripts-dev/definitions.py
@@ -0,0 +1,142 @@
+#! /usr/bin/python
+
+import ast
+import yaml
+
+class DefinitionVisitor(ast.NodeVisitor):
+    def __init__(self):
+        super(DefinitionVisitor, self).__init__()
+        self.functions = {}
+        self.classes = {}
+        self.names = {}
+        self.attrs = set()
+        self.definitions = {
+            'def': self.functions,
+            'class': self.classes,
+            'names': self.names,
+            'attrs': self.attrs,
+        }
+
+    def visit_Name(self, node):
+        self.names.setdefault(type(node.ctx).__name__, set()).add(node.id)
+
+    def visit_Attribute(self, node):
+        self.attrs.add(node.attr)
+        for child in ast.iter_child_nodes(node):
+            self.visit(child)
+
+    def visit_ClassDef(self, node):
+        visitor = DefinitionVisitor()
+        self.classes[node.name] = visitor.definitions
+        for child in ast.iter_child_nodes(node):
+            visitor.visit(child)
+
+    def visit_FunctionDef(self, node):
+        visitor = DefinitionVisitor()
+        self.functions[node.name] = visitor.definitions
+        for child in ast.iter_child_nodes(node):
+            visitor.visit(child)
+
+
+def non_empty(defs):
+    functions = {name: non_empty(f) for name, f in defs['def'].items()}
+    classes = {name: non_empty(f) for name, f in defs['class'].items()}
+    result = {}
+    if functions: result['def'] = functions
+    if classes: result['class'] = classes
+    names = defs['names']
+    uses = []
+    for name in names.get('Load', ()):
+        if name not in names.get('Param', ()) and name not in names.get('Store', ()):
+            uses.append(name)
+    uses.extend(defs['attrs'])
+    if uses: result['uses'] = uses
+    result['names'] = names
+    result['attrs'] = defs['attrs']
+    return result
+
+
+def definitions_in_code(input_code):
+    input_ast = ast.parse(input_code)
+    visitor = DefinitionVisitor()
+    visitor.visit(input_ast)
+    definitions = non_empty(visitor.definitions)
+    return definitions
+
+
+def definitions_in_file(filepath):
+    with open(filepath) as f:
+        return definitions_in_code(f.read())
+
+
+def defined_names(prefix, defs, names):
+    for name, funcs in defs.get('def', {}).items():
+        names.setdefault(name, {'defined': []})['defined'].append(prefix + name)
+        defined_names(prefix + name + ".", funcs, names)
+
+    for name, funcs in defs.get('class', {}).items():
+        names.setdefault(name, {'defined': []})['defined'].append(prefix + name)
+        defined_names(prefix + name + ".", funcs, names)
+
+
+def used_names(prefix, defs, names):
+    for name, funcs in defs.get('def', {}).items():
+        used_names(prefix + name + ".", funcs, names)
+
+    for name, funcs in defs.get('class', {}).items():
+        used_names(prefix + name + ".", funcs, names)
+
+    for used in defs.get('uses', ()):
+        if used in names:
+            names[used].setdefault('used', []).append(prefix.rstrip('.'))
+
+
+if __name__ == '__main__':
+    import sys, os, argparse, re
+
+    parser = argparse.ArgumentParser(description='Find definitions.')
+    parser.add_argument(
+        "--unused", action="store_true", help="Only list unused definitions"
+    )
+    parser.add_argument(
+        "--ignore", action="append", metavar="REGEXP", help="Ignore a pattern"
+    )
+    parser.add_argument(
+        "--pattern", action="append", metavar="REGEXP",
+        help="Search for a pattern"
+    )
+    parser.add_argument(
+        "directories", nargs='+', metavar="DIR",
+        help="Directories to search for definitions"
+    )
+    args = parser.parse_args()
+
+    definitions = {}
+    for directory in args.directories:
+        for root, dirs, files in os.walk(directory):
+            for filename in files:
+                if filename.endswith(".py"):
+                    filepath = os.path.join(root, filename)
+                    definitions[filepath] = definitions_in_file(filepath)
+
+    names = {}
+    for filepath, defs in definitions.items():
+        defined_names(filepath + ":", defs, names)
+
+    for filepath, defs in definitions.items():
+        used_names(filepath + ":", defs, names)
+
+    patterns = [re.compile(pattern) for pattern in args.pattern or ()]
+    ignore = [re.compile(pattern) for pattern in args.ignore or ()]
+
+    result = {}
+    for name, definition in names.items():
+        if patterns and not any(pattern.match(name) for pattern in patterns):
+            continue
+        if ignore and any(pattern.match(name) for pattern in ignore):
+            continue
+        if args.unused and definition.get('used'):
+            continue
+        result[name] = definition
+
+    yaml.dump(result, sys.stdout, default_flow_style=False)
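
The new definitions.py walks the given directories, parses every .py file with the ast module, and records where names are defined and where they are used; --unused then filters the YAML output down to definitions that never appear to be referenced. A hedged sketch of the underlying helper in isolation (assuming definitions_in_code from the script above is available):

sample = "class Foo(object):\n    def bar(self):\n        return baz()\n"
defs = definitions_in_code(sample)
# 'Foo' lands under defs['class'], 'bar' under its 'def' entries,
# and the call to 'baz' is reported under bar's 'uses'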
diff --git a/scripts-dev/hash_history.py b/scripts-dev/hash_history.py
index bdad530af8..616d6a10e7 100644
--- a/scripts-dev/hash_history.py
+++ b/scripts-dev/hash_history.py
@@ -6,8 +6,8 @@ from synapse.crypto.event_signing import (
     add_event_pdu_content_hash, compute_pdu_event_reference_hash
 )
 from synapse.api.events.utils import prune_pdu
-from syutil.base64util import encode_base64, decode_base64
-from syutil.jsonutil import encode_canonical_json
+from unpaddedbase64 import encode_base64, decode_base64
+from canonicaljson import encode_canonical_json
 import sqlite3
 import sys