def checkRegressions()

in benchmarking/regression_detectors/regression_detectors.py [0:0]


def checkRegressions(info, platform, framework, benchmark, reporters, meta, outdir):
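    """Detect and, if needed, verify performance regressions for one benchmark run.

    Flow as implemented below: skip metrics with no registered detector, run the
    detector once, and, if it flags regressions, re-run the benchmark in
    'verify' mode and run the detector again. Only regressions that survive the
    verify pass are reported as confirmed.
    """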
    if meta["metric"] not in detectors:
        return
    commit = info["treatment"]["commit"]
    getLogger().info("Checking regression for " + commit)
    regressions, infos = _detectRegression(info, meta, outdir)
    if regressions:
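        # Deferred import, presumably to avoid a circular dependency with the driver.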
        from driver.benchmark_driver import runOneBenchmark

        getLogger().info(
            "Regression detected on {}, ".format(platform.getMangledName())
            + "verifying: {}".format(",".join(regressions))
        )
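        # Re-run each flagged benchmark in 'verify' mode to separate real regressions from noise.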
        for i in infos:
            i["run_type"] = "verify"
            runOneBenchmark(
                i, benchmark, framework, platform, meta["backend"], reporters
            )
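        # Second detection pass, now including the verify runs, to confirm the regression.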
        verify_regressions, _ = _detectRegression(info, meta, outdir)
        if verify_regressions:
            # regression verified
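            # Mark the regressed entry (second to last in the detector's info list)
            # and re-run it with run_type 'regress' so reporters can record the
            # confirmed regression and its metric types.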
            regressed_info = infos[-2]
            regressed_info["run_type"] = "regress"
            regressed_info["regressed_types"] = verify_regressions
            runOneBenchmark(
                regressed_info,
                benchmark,
                framework,
                platform,
                meta["backend"],
                reporters,
            )
            getLogger().info(
                "Regression confirmed for commit: {}".format(
                    regressed_info["treatment"]["commit"]
                )
            )
            getLogger().info("Regressed types: {}".format(",".join(verify_regressions)))
        getLogger().info(
            "Regression verification completed for "
            + "{} on {}".format(platform.getMangledName(), commit)
        )
    else:
        getLogger().info(
            "No Regression found for "
            + "{} on {}".format(platform.getMangledName(), commit)
        )