in scripts/throughput.py [0:0]
def aggregate(parsed_log_files, aggregated_parsed_log_file):
    """Merge per-shard parsed throughput logs into one aggregated file.

    Each input file is expected to contain the ``repr`` of a dict shaped
    ``{orders_type: {z_value: [(shard_id, value)]}}`` with exactly one
    z_value entry holding exactly one (shard_id, value) pair per type.
    The merged item lists for every (orders_type, z_value) are sorted by
    shard id, regrouped to a ``(shards, runs, 2)`` nested list, and the
    whole structure is written back as ``str(...)`` to
    *aggregated_parsed_log_file*.

    Args:
        parsed_log_files: paths of at least two per-shard parsed log files.
        aggregated_parsed_log_file: path of the output file to write.

    Raises:
        ValueError: if fewer than two input files are given, or the data
            does not have the expected shape (including non-uniform run
            counts per shard, which would corrupt the reshape).
    """
    # Local import keeps the function self-contained; literal_eval replaces
    # eval(): the files hold plain Python literals (this function writes them
    # via str()), and eval() on file contents is a code-execution risk.
    import ast

    if len(parsed_log_files) <= 1:
        raise ValueError("need at least two parsed log files to aggregate")

    with open(parsed_log_files[0], 'r') as f:
        aggregate_orders = ast.literal_eval(f.read())

    # Fold each remaining file's single (z_value -> [item]) entry per type
    # into the accumulator.  NOTE(review): assumes the first file already
    # contains every orders_type — a missing key would raise KeyError here.
    for parsed_log_file in parsed_log_files[1:]:
        with open(parsed_log_file, 'r') as f:
            data = ast.literal_eval(f.read())
        for orders_type, orders in data.items():
            if len(orders) != 1:
                raise ValueError(
                    f"expected exactly one z_value per type, got {orders}")
            (z_value, items), = orders.items()
            if len(items) != 1:
                raise ValueError(
                    f"expected exactly one item per file, got {items}")
            if z_value in aggregate_orders[orders_type]:
                aggregate_orders[orders_type][z_value] += items
            else:
                aggregate_orders[orders_type][z_value] = items

    # Regroup each flat item list into a (shards, runs, 2) nested structure.
    for orders_type, orders in aggregate_orders.items():
        for z_value, items in orders.items():
            # Group items of the same shard together (numeric shard order).
            items.sort(key=lambda tup: int(tup[0]))
            counter = Counter(item[0] for item in items)
            shards = len(counter)
            run_counts = set(counter.values())
            # Every shard must have the SAME number of runs, otherwise the
            # reshape below silently mixes items across shards.  (The old
            # check `runs * shards == len(items)` misses e.g. counts [2,1,3]
            # over 3 shards: 2*3 == 6 == len(items) yet groups are uneven.)
            if len(run_counts) != 1:
                raise ValueError(
                    f"non-uniform runs per shard: {dict(counter)}")
            runs = run_counts.pop()
            arr = np.array(items)
            aggregate_orders[orders_type][z_value] = (
                arr.reshape((shards, runs, 2)).tolist())

    print(aggregate_orders)
    with open(aggregated_parsed_log_file, 'w') as f:
        f.write(str(aggregate_orders))