# Copyright 2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import argparse
import datetime
import html
import json

import pydot

from synapse.api.room_versions import KNOWN_ROOM_VERSIONS
from synapse.events import make_event_from_dict
from synapse.util.frozenutils import unfreeze


def make_graph(file_name: str, file_prefix: str, limit: int) -> None:
    """
    Generate a dot file and an SVG file for a graph of the events in a room,
    in topological order, by reading line-delimited JSON events from a file.
    """
    print("Reading lines")
    with open(file_name) as f:
        lines = f.readlines()

    print("Read lines")

    # Figure out the room version; assume the first line is the create event.
    room_version = KNOWN_ROOM_VERSIONS[
        json.loads(lines[0]).get("content", {}).get("room_version")
    ]

    events = [make_event_from_dict(json.loads(line), room_version) for line in lines]

    print("Loaded events.")

    events.sort(key=lambda e: e.depth)

    print("Sorted events")

    if limit:
        events = events[-int(limit) :]

    node_map = {}

    graph = pydot.Dot(graph_name="Test")

    for event in events:
        t = datetime.datetime.fromtimestamp(
            float(event.origin_server_ts) / 1000
        ).strftime("%Y-%m-%d %H:%M:%S,%f")

        # Dump the raw content for debugging; the label content is rebuilt below.
        content = json.dumps(unfreeze(event.get_dict()["content"]), indent=4)
        content = content.replace("\n", "<br/>\n")

        print(content)
        content = []
        for key, value in unfreeze(event.get_dict()["content"]).items():
            if value is None:
                value = "<null>"
            elif isinstance(value, str):
                pass
            else:
                value = json.dumps(value)

            content.append(
                "<b>%s</b>: %s,"
                % (
                    # Decode back to str so the label doesn't contain b'...' literals.
                    html.escape(key, quote=True)
                    .encode("ascii", "xmlcharrefreplace")
                    .decode("ascii"),
                    html.escape(value, quote=True)
                    .encode("ascii", "xmlcharrefreplace")
                    .decode("ascii"),
                )
            )

        content = "<br/>\n".join(content)

        print(content)

        label = (
            "<"
            "<b>%(name)s </b><br/>"
            "Type: <b>%(type)s </b><br/>"
            "State key: <b>%(state_key)s </b><br/>"
            "Content: <b>%(content)s </b><br/>"
            "Time: <b>%(time)s </b><br/>"
            "Depth: <b>%(depth)s </b><br/>"
            ">"
        ) % {
            "name": event.event_id,
            "type": event.type,
            "state_key": event.get("state_key", None),
            "content": content,
            "time": t,
            "depth": event.depth,
        }

        node = pydot.Node(name=event.event_id, label=label)

        node_map[event.event_id] = node
        graph.add_node(node)

    print("Created Nodes")
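
    # Edges point from each event to its prev_events, i.e. from newer events
    # back towards the room's create event.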
    for event in events:
        for prev_id in event.prev_event_ids():
            try:
                end_node = node_map[prev_id]
            except KeyError:
                # The prev event isn't in the file (or was cut off by the limit),
                # so add a stub node for it.
                end_node = pydot.Node(name=prev_id, label=f"<<b>{prev_id}</b>>")

                node_map[prev_id] = end_node
                graph.add_node(end_node)

            edge = pydot.Edge(node_map[event.event_id], end_node)
            graph.add_edge(edge)

    print("Created edges")

    graph.write("%s.dot" % file_prefix, format="raw", prog="dot")

    print("Created Dot")

    graph.write_svg("%s.svg" % file_prefix, prog="dot")

    print("Created svg")


if __name__ == "__main__":
    parser = argparse.ArgumentParser(
        description="Generate a PDU graph for a given room by reading "
        "from a file with line-delimited events.\n"
        "Requires pydot."
    )
    parser.add_argument(
        "-p",
        "--prefix",
        dest="prefix",
        help="String to prefix output files with",
        default="graph_output",
    )
    parser.add_argument("-l", "--limit", help="Only retrieve the last N events.")
    parser.add_argument("event_file")

    args = parser.parse_args()

    make_graph(args.event_file, args.prefix, args.limit)
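
# Example invocation (file names below are hypothetical):
#
#   python <path-to-this-script> --prefix my_room --limit 100 events.json
#
# This reads line-delimited events from events.json, keeps the last 100, and
# writes my_room.dot and my_room.svg.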