in e2e-examples/gcs/benchmark_analysis/analyze_peer.py [0:0]
def transform(args):
    """Aggregate per-peer read activity from a benchmark JSON report and write
    a per-second TSV summary to stdout.

    Uses the clean_json/PeerData/apply_chunks helpers and the csv, datetime,
    json, and sys modules.
    """
    # Group chunk data by peer, skipping operations with a non-zero status
    # (i.e. operations that did not complete successfully).
    peer_map = {}
    with open(args.file) as f:
        # clean_json sanitizes the raw report text before parsing.
        j = json.loads(clean_json(f.read()), strict=False)
        for op in j["operations"]:
            status = op["status"]
            if status != 0:
                continue
            peer = op["peer"]
            d = peer_map.setdefault(peer, PeerData())
            apply_chunks(d, op["chunks"])

    # Walk the observed time range one second at a time, emitting per-peer
    # read counts/bytes (c<i>/t<i>) plus totals across all peers (c_all/t_all).
    rows = []
    ts = min(min(v.time_map.keys()) for v in peer_map.values())
    te = max(max(v.time_map.keys()) for v in peer_map.values())
    tc = ts
    while tc <= te:
        row = {"Time": tc.isoformat()}
        total_count = 0
        total_bytes = 0
        for i, p in enumerate(peer_map.values()):
            td = p.time_map.get(tc)
            if td is not None:
                row["c" + str(i + 1)] = td.read_count
                row["t" + str(i + 1)] = td.read_bytes
                total_count += td.read_count
                total_bytes += td.read_bytes
        row["c_all"] = total_count
        row["t_all"] = total_bytes
        rows.append(row)
        tc += datetime.timedelta(seconds=1)

    # Emit tab-separated output; seconds where a peer has no data leave that
    # peer's columns empty (DictWriter's default restval).
    fields = ['Time', 'c_all', 't_all']
    for i in range(len(peer_map)):
        fields.append("c" + str(i + 1))
        fields.append("t" + str(i + 1))
    w = csv.DictWriter(sys.stdout, fields, delimiter='\t')
    w.writeheader()
    w.writerows(rows)
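

# A minimal driver sketch (not part of the original module) showing how
# transform() might be wired up; it only assumes that `args` exposes a `file`
# attribute naming the benchmark JSON report. The real script's argument
# parsing may differ, and _example_main is a hypothetical name.
def _example_main():
    import argparse

    parser = argparse.ArgumentParser(
        description="Emit a per-second, per-peer read summary as TSV")
    parser.add_argument("file", help="path to the benchmark JSON report")
    transform(parser.parse_args())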