def _open_and_organize_perfherder()

in mozperftest_tools/mozperftest_tools/side_by_side.py


    def _open_and_organize_perfherder(self, files, metric, pageload_tests=False):
        """Load each perfherder JSON file and gather the subtests whose
        name matches `metric`, grouped either by pageload type
        (cold/warm) or by suite name. Relies on the module-level
        `import json`.
        """
        def _open_perfherder(filen):
            # Parse a single perfherder JSON artifact.
            with open(filen) as f:
                return json.load(f)

        if pageload_tests:
            res = {"cold": [], "warm": []}

            for filen in files:
                data = _open_perfherder(filen)

                for suite in data["suites"]:
                    # Suites are tagged "cold" via extraOptions;
                    # everything else is treated as a warm pageload.
                    pl_type = "warm"
                    if "cold" in suite["extraOptions"]:
                        pl_type = "cold"

                    for subtest in suite["subtests"]:
                        if subtest["name"].lower() != metric.lower():
                            continue
                        # Each entry here will be a single retrigger of
                        # the test for the requested metric (ordered
                        # based on the `files` ordering)
                        res[pl_type].append(subtest)
        else:
            res = {}

            for filen in files:
                data = _open_perfherder(filen)

                for suite in data["suites"]:
                    for subtest in suite["subtests"]:
                        if subtest["name"].lower() != metric.lower():
                            continue
                        # Each entry here will be a single retrigger of
                        # the test for the requested metric (ordered
                        # based on the `files` ordering)
                        res.setdefault(suite["name"], []).append(subtest)

        return res
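
For context, below is a minimal, self-contained sketch of the input shape this method consumes and the grouping it produces. The artifact contents, suite name, and metric values are hypothetical; only the `suites`/`extraOptions`/`subtests`/`name` keys are taken from the perfherder format the method reads above.

    import json
    import tempfile

    # Hypothetical perfherder-style artifact with the keys the method reads.
    artifact = {
        "suites": [
            {
                "name": "amazon",
                "extraOptions": ["cold"],
                "subtests": [
                    {"name": "fcp", "replicates": [120, 118, 125]},
                    {"name": "loadtime", "replicates": [900, 910]},
                ],
            }
        ]
    }

    # Write the artifact to disk so it can be re-read like a real file.
    with tempfile.NamedTemporaryFile("w", suffix=".json", delete=False) as f:
        json.dump(artifact, f)
        path = f.name

    # Standalone re-implementation of the pageload branch, for illustration.
    res = {"cold": [], "warm": []}
    with open(path) as f:
        data = json.load(f)
    for suite in data["suites"]:
        pl_type = "cold" if "cold" in suite["extraOptions"] else "warm"
        for subtest in suite["subtests"]:
            if subtest["name"].lower() == "fcp":
                res[pl_type].append(subtest)

    print(res["cold"])  # [{'name': 'fcp', 'replicates': [120, 118, 125]}]
    print(res["warm"])  # []

Each appended entry corresponds to one retrigger of the test, so with multiple input files the lists preserve the `files` ordering, as the comments in the method note.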