def main()

in prediction_generation/old-code/cpdbench_mozilla_rep_with_restart_on_alert.py [0:0]
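The function is extracted from a larger script, so its module-level imports are not shown here. Below is a minimal sketch of what the body appears to rely on: the standard-library imports follow directly from the names used, while the project-specific names (RevisionDatum, detect_changes, get_alert_properties, geomean, moz_measure_noise, newrelic.agent, and the cpdbench helpers parse_args, load_dataset, exit_success) are assumptions whose import paths depend on the surrounding repository.

# Standard-library imports inferred from the function body.
import copy
import json
import logging
import os
import time
from collections import defaultdict, namedtuple
from datetime import datetime

# Project-specific names used below; their import paths are assumptions and
# must match the surrounding repository:
#   RevisionDatum, detect_changes, get_alert_properties, geomean   (Mozilla/Treeherder alert code)
#   moz_measure_noise, newrelic.agent                               (noise profiling, error reporting)
#   parse_args, load_dataset, exit_success                          (cpdbench utility helpers)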


def main():
    logger = logging.getLogger(__name__)
    args = parse_args()
    data, mat = load_dataset(args.input)
    raw_data = data.copy()
    start_time = time.time()
    with open(args.signatures_attributes, 'r') as file:
        signatures_attributes = json.load(file)
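    # The dataset filename (without its extension) is used as the signature id
    # when looking up that signature's attributes in the JSON file.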
    signature_id = os.path.splitext(os.path.basename(args.input))[0]
    signature_attributes = signatures_attributes[signature_id]
    Signature = namedtuple('Signature', signature_attributes.keys())
    signature = Signature(**signature_attributes)
    raw_args = copy.deepcopy(args)
    series = data['series'][0]['raw']
    push_timestamp = data['time']['raw']
    push_timestamp = [datetime.strptime(ts, "%Y-%m-%d %H:%M:%S") for ts in push_timestamp]
    unique_push_timestamp = sorted(set(push_timestamp))
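    # Group the raw measurement values by push timestamp so that each
    # RevisionDatum carries every value recorded for that push.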
    grouped_data = defaultdict(list)
    for ts, value in zip(push_timestamp, series):
        grouped_data[ts].append(value)
    data = [RevisionDatum(ts, ts, grouped_data[ts]) for ts in grouped_data]
    # data_sorted = sorted(data)
    # These values are the defaults taken from the Mozilla code. Note that
    # min_back_window, max_back_window, and fore_window come from the
    # PerformanceSignature class; I did not find them recorded in the
    # signatures data we have, so we use the defaults.
    min_back_window = 12
    max_back_window = 24
    fore_window = 12
    alert_threshold = 2
    locations = []


    selected_data = data
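    # "Restart on alert": run Mozilla's change detection, keep only the
    # earliest alert found in each pass, drop all data up to and including
    # that point, and repeat until a pass produces no new alerts.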
    alert_detected_in_prev_iteration = True
    while alert_detected_in_prev_iteration:
        temp_locations = []
        temp_timestamps = []
        analyzed_series = detect_changes(
            selected_data,
            min_back_window=min_back_window,
            max_back_window=max_back_window,
            fore_window=fore_window,
        )
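        # detect_changes annotates each datum with change_detected, t,
        # historical_stats and forward_stats, which are used below.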
        for prev, cur in zip(analyzed_series, analyzed_series[1:]):
            if cur.change_detected:
                prev_value = cur.historical_stats["avg"]
                new_value = cur.forward_stats["avg"]
                alert_properties = get_alert_properties(
                    prev_value, new_value, signature.lower_is_better
                )
                noise_profile = "N/A"
                try:
                    # Gather all data up to the current data point that
                    # shows the regression and obtain a noise profile on it.
                    # This helps us to ignore this alert and others in the
                    # calculation that could influence the profile.
                    noise_data = []
                    for point in analyzed_series:
                        if point == cur:
                            break
                        noise_data.append(geomean(point.values))

                    noise_profile, _ = moz_measure_noise.deviance(noise_data)
                    if not isinstance(noise_profile, str):
                        raise Exception(
                            f"Expecting a string as a noise profile, got: {type(noise_profile)}"
                        )
                except Exception:
                    # Fail without breaking the alert computation
                    newrelic.agent.notice_error()
                    logger.error("Failed to obtain a noise profile.")

                # ignore regressions below the configured regression threshold

                # ALERT_PCT, ALERT_ABS, and ALERT_CHANGE_TYPES come from the PerformanceSignature class in the Treeherder code
                ALERT_PCT = 0
                ALERT_ABS = 1
                ALERT_CHANGE_TYPES = ((ALERT_PCT, "percentage"), (ALERT_ABS, "absolute"))
                if (
                    (
                        signature.alert_change_type is None
                        or signature.alert_change_type == ALERT_PCT
                    )
                    and alert_properties.pct_change < alert_threshold
                ) or (
                    signature.alert_change_type == ALERT_ABS
                    and abs(alert_properties.delta) < alert_threshold
                ):
                    continue

                # django/mysql doesn't understand "inf", so just use some
                # arbitrarily high value for that case
                t_value = cur.t
                if t_value == float("inf"):
                    t_value = 1000
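                # Note: t_value is not used further in this script.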

                # This is where we record the alert, i.e. append its index in
                # unique_push_timestamp to the temporary locations list.
                temp_locations += [i for i, ts in enumerate(unique_push_timestamp) if ts == cur.push_timestamp]
                temp_timestamps += [ts for i, ts in enumerate(unique_push_timestamp) if ts == cur.push_timestamp]
        if len(temp_locations) == 0:
            alert_detected_in_prev_iteration = False
        else:
            earliest_alert_timestamp = sorted(temp_timestamps)[0]
            earliest_alert_index = sorted(temp_locations)[0]
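            # Keep only the data strictly after the earliest alert, record that
            # alert's index, and run the detection again on the truncated data.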
            selected_data = [rev for rev in selected_data if rev.push_timestamp > earliest_alert_timestamp]
            locations += [earliest_alert_index]

    stop_time = time.time()
    runtime = stop_time - start_time
    exit_success(raw_data, raw_args, vars(args), locations, runtime, __file__)