author     Erik Johnston <erik@matrix.org>  2015-01-30 14:08:28 +0000
committer  Erik Johnston <erik@matrix.org>  2015-01-30 14:08:28 +0000
commit     e0b7c521cbe4d9aa4403a8e5394177a51c6d5d8f (patch)
tree       f84a3b1845ff2e6df699d69f3fbb99a2c1ed918b /contrib/graph/graph2.py
parent     Merge branch 'new_state_resolution' of github.com:matrix-org/synapse into rej... (diff)
parent     We do need Twisted 14, not 15: we use internal Twisted things that have been ... (diff)
download   synapse-e0b7c521cbe4d9aa4403a8e5394177a51c6d5d8f.tar.xz
Merge branch 'develop' of github.com:matrix-org/synapse into rejections_storage
Conflicts:
	synapse/storage/__init__.py
	synapse/storage/schema/delta/v12.sql
Diffstat (limited to 'contrib/graph/graph2.py')
-rw-r--r--  contrib/graph/graph2.py  156
1 file changed, 156 insertions(+), 0 deletions(-)
diff --git a/contrib/graph/graph2.py b/contrib/graph/graph2.py
new file mode 100644
index 0000000000..6b551d42e5
--- /dev/null
+++ b/contrib/graph/graph2.py
@@ -0,0 +1,156 @@
+# Copyright 2014 OpenMarket Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+import sqlite3
+import pydot
+import cgi
+import json
+import datetime
+import argparse
+
+from synapse.events import FrozenEvent
+
+
+def make_graph(db_name, room_id, file_prefix, limit):
+    conn = sqlite3.connect(db_name)
+
+    sql = (
+        "SELECT json FROM event_json as j "
+        "INNER JOIN events as e ON e.event_id = j.event_id "
+        "WHERE j.room_id = ?"
+    )
+
+    args = [room_id]
+
+    if limit:
+        sql += (
+            " ORDER BY topological_ordering DESC, stream_ordering DESC "
+            "LIMIT ?"
+        )
+
+        args.append(limit)
+
+    c = conn.execute(sql, args)
+
+    events = [FrozenEvent(json.loads(e[0])) for e in c.fetchall()]
+
+    events.sort(key=lambda e: e.depth)
+
+    node_map = {}
+    state_groups = {}
+
+    graph = pydot.Dot(graph_name="Test")
+
+    for event in events:
+        c = conn.execute(
+            "SELECT state_group FROM event_to_state_groups "
+            "WHERE event_id = ?",
+            (event.event_id,)
+        )
+
+        res = c.fetchone()
+        state_group = res[0] if res else None
+
+        if state_group is not None:
+            state_groups.setdefault(state_group, []).append(event.event_id)
+
+        t = datetime.datetime.fromtimestamp(
+            float(event.origin_server_ts) / 1000
+        ).strftime('%Y-%m-%d %H:%M:%S,%f')
+
+        content = json.dumps(event.get_dict()["content"])
+
+        label = (
+            "<"
+            "<b>%(name)s </b><br/>"
+            "Type: <b>%(type)s </b><br/>"
+            "State key: <b>%(state_key)s </b><br/>"
+            "Content: <b>%(content)s </b><br/>"
+            "Time: <b>%(time)s </b><br/>"
+            "Depth: <b>%(depth)s </b><br/>"
+            "State group: %(state_group)s<br/>"
+            ">"
+        ) % {
+            "name": event.event_id,
+            "type": event.type,
+            "state_key": event.get("state_key", None),
+            "content": cgi.escape(content, quote=True),
+            "time": t,
+            "depth": event.depth,
+            "state_group": state_group,
+        }
+
+        node = pydot.Node(
+            name=event.event_id,
+            label=label,
+        )
+
+        node_map[event.event_id] = node
+        graph.add_node(node)
+
+    for event in events:
+        for prev_id, _ in event.prev_events:
+            try:
+                end_node = node_map[prev_id]
+            except KeyError:
+                end_node = pydot.Node(
+                    name=prev_id,
+                    label="<<b>%s</b>>" % (prev_id,),
+                )
+
+                node_map[prev_id] = end_node
+                graph.add_node(end_node)
+
+            edge = pydot.Edge(node_map[event.event_id], end_node)
+            graph.add_edge(edge)
+
+    for group, event_ids in state_groups.items():
+        if len(event_ids) <= 1:
+            continue
+
+        cluster = pydot.Cluster(
+            str(group),
+            label="<State Group: %s>" % (str(group),)
+        )
+
+        for event_id in event_ids:
+            cluster.add_node(node_map[event_id])
+
+        graph.add_subgraph(cluster)
+
+    graph.write('%s.dot' % file_prefix, format='raw', prog='dot')
+    graph.write_svg("%s.svg" % file_prefix, prog='dot')
+
+if __name__ == "__main__":
+    parser = argparse.ArgumentParser(
+        description="Generate a PDU graph for a given room by reading "
+                    "the list of PDUs from the given database. \n"
+                    "Requires pydot."
+    )
+    parser.add_argument(
+        "-p", "--prefix", dest="prefix",
+        help="String to prefix output files with",
+        default="graph_output"
+    )
+    parser.add_argument(
+        "-l", "--limit", type=int,
+        help="Only retrieve the last N events.",
+    )
+    parser.add_argument('db')
+    parser.add_argument('room')
+
+    args = parser.parse_args()
+
+    make_graph(args.db, args.room, args.prefix, args.limit)
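A minimal usage sketch of the function added here (the database path and room ID are placeholders, not values from the commit; it assumes pydot is installed and the synapse package is importable so FrozenEvent can be loaded):

    # Render the last 100 events of a room directly from the homeserver's
    # SQLite database, producing graph_output.dot and graph_output.svg.
    from graph2 import make_graph

    make_graph(
        db_name="homeserver.db",          # path to the Synapse SQLite database (placeholder)
        room_id="!someroom:example.com",  # room to graph (placeholder)
        file_prefix="graph_output",
        limit=100,
    )

The same result can be obtained from the command line by passing the database and room as positional arguments, e.g. python graph2.py -l 100 homeserver.db '!someroom:example.com'.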