in generate_side_by_side.py [0:0]
def open_and_organize_perfherder(files, metric):
    """Collect every retrigger of ``metric`` from a set of PerfHerder files.

    Each file is a PerfHerder JSON artifact containing a ``suites`` list.
    A suite is treated as "cold" when the string ``"cold"`` appears in its
    ``extraOptions``; otherwise it is "warm".  Subtests whose ``name``
    matches ``metric`` (case-insensitively) are gathered per pageload type,
    preserving the ordering of ``files``.

    :param files: iterable of paths to PerfHerder JSON files.
    :param metric: subtest name to extract (compared case-insensitively).
    :return: dict with ``"cold"`` and ``"warm"`` keys, each a list of the
        matching subtest dicts (one entry per retrigger, in ``files`` order).
    """

    def _load(path):
        with open(path) as fh:
            return json.load(fh)

    organized = {"cold": [], "warm": []}
    # Hoist the case-normalized target out of the loops.
    target = metric.lower()

    for path in files:
        for suite in _load(path)["suites"]:
            variant = "cold" if "cold" in suite["extraOptions"] else "warm"
            # Each matching subtest is a single retrigger of the test for
            # the requested metric (ordered based on the `files` ordering).
            organized[variant].extend(
                subtest
                for subtest in suite["subtests"]
                if subtest["name"].lower() == target
            )

    return organized