in data_extraction_transformation/scripts/one_time_use_scripts/check_csvs_of_problematic_signatures.py [0:0]
def main():
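    """Scan the CSVs of the lost (problematic) signatures and tally how many
    are empty, non-empty but containing only TN rows, or something else."""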
    # Summary counters, kept at module scope so they can be updated while
    # the per-project folders are processed.
    global counter_empty
    global counter_non_empty_but_only_tn
    global other
    counter_empty = 0
    counter_non_empty_but_only_tn = 0
    other = 0
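    # Numeric status codes -> readable labels, and labels -> TP/FP/SP categories.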
    alert_status_mapping = {
        0: "untriaged",
        1: "downstream",
        2: "reassigned",
        3: "invalid",
        4: "improvement",
        5: "investigating",
        6: "wontfix",
        7: "fixed",
        8: "backedout",
    }
    test_status_mapping = {
        0: "untriaged",
        1: "downstream",
        2: "reassigned",
        3: "invalid",
        4: "acknowledged",
    }
    category_mapping = {
        'investigating': 'SP',
        'reassigned': 'TP',
        'invalid': 'FP',
        'improvement': 'TP',
        'fixed': 'TP',
        'wontfix': 'TP',
        'untriaged': 'SP',
        'backedout': 'TP',
        'downstream': 'TP',
        'acknowledged': 'TP',
    }
    args = parse_args()
    input_folder = args.input_folder
    alerts_file = args.alerts_file
    lost_signatures = args.lost_signatures

    # Load the alerts and reduce each alert status to a TP/FP/SP category.
    df_alerts = pd.read_csv(alerts_file)
    df_alerts = df_alerts.drop(columns=['alert_push_timestamp'])
    df_alerts['alert_status_general'] = df_alerts['alert_status'].map(alert_status_mapping)
    df_alerts['alert_status_general'] = df_alerts['alert_status_general'].replace(category_mapping)
    df_alerts.rename(columns={'test_series_signature_id': 'signature_id'}, inplace=True)

    # The lost-signatures file holds a single comma-separated list of signature IDs.
    with open(lost_signatures, "r") as f:
        lost_signatures_set = {int(x) for x in f.read().split(",")}
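    # Each project's CSVs may be split across several folders
    # (autoland data is spread over four folders).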
    projects_folders_mapping = {
        "autoland": ["autoland1", "autoland2", "autoland3", "autoland4"],
        "firefox-android": ["firefox-android"],
        "mozilla-beta": ["mozilla-beta"],
        "mozilla-central": ["mozilla-central"],
        "mozilla-release": ["mozilla-release"],
    }
    for folders in projects_folders_mapping.values():
        for folder in folders:
            process_folder(input_folder, folder, lost_signatures_set, df_alerts)
print("COUNTERRRRRRRRR")
print("enpty_counter:")
print(counter_empty)
print("counter_non_empty_but_only_tn:")
print(counter_non_empty_but_only_tn)
print("other:")
print(other)