mozperftest_tools/mozperftest_tools/utils/artifact_downloader.py [273:348]:
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
    head_rev = ""
    all_tasks = False
    if "all" in test_suites:
        all_tasks = True

    # For compatibility
    if type(artifact_to_get) not in (list,):
        artifact_to_get = [artifact_to_get]

    # Make the data directories
    task_dir = os.path.join(output_dir, task_group_id)

    run_number = 0
    max_num = 0
    if not os.path.exists(task_dir):
        os.makedirs(task_dir, exist_ok=True)
    else:
        # Get current run number
        curr_dir = os.getcwd()
        os.chdir(task_dir)
        dir_list = next(os.walk("."))[1]
        max_num = 0
        for subdir in dir_list:
            run_num = int(subdir)
            if run_num > max_num:
                max_num = run_num
        os.chdir(curr_dir)

    if not ingest_continue:
        run_number = max_num + 1

    output_dir = os.path.join(task_dir, str(run_number))
    os.makedirs(output_dir, exist_ok=True)

    log("Artifacts will be stored in %s" % output_dir)
    config_json_path = os.path.join(output_dir, "config.json")
    with open(config_json_path, "w") as f:
        json.dump(
            {
                "test_suites": test_suites,
                "platform": platform,
                "artifact": artifact_to_get,
                "download_failures": download_failures,
                "task_group_id": task_group_id,
            },
            f,
            indent=4,
        )

    log("Saved run configuration to %s" % config_json_path)

    task_ids = []
    log("Getting task group information...")
    tgi_path = os.path.join(task_dir, "task-group-information.json")
    if os.path.exists(tgi_path):
        with open(tgi_path, "r") as f:
            tasks = json.load(f)
    else:
        tasks = get_tasks_in_group(task_group_id)
        with open(tgi_path, "w") as f:
            json.dump(tasks, f, indent=4)
    log("Obtained")

    # Used to keep track of how many grcov files
    # we are downloading per test.
    task_counters = {}
    taskid_to_file_map = {}

    # For each task in this group
    threads = []
    TOTAL_TASKS = len(tasks)
    for task in tasks:
        download_this_task = False
        # Get the test name
        if platform not in task["task"]["metadata"]["name"]:
            continue
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
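
The run-numbering logic above scans the task directory for numeric subdirectories, starts a new run at max + 1, and leaves run_number at 0 when ingest_continue is set. A minimal sketch of that step as a standalone helper (hypothetical name, not part of either file), avoiding the os.chdir() round-trip and skipping non-numeric directory names:

    import os

    def next_run_output_dir(task_dir, ingest_continue=False):
        """Return the per-run output directory, creating it if needed.

        Mirrors the excerpt: a fresh run gets max(existing) + 1, while
        ingest_continue reuses run 0. Non-numeric subdirectory names are
        skipped here (an assumption; the original int() call would raise
        ValueError on them).
        """
        run_number = 0
        max_num = 0
        if not os.path.exists(task_dir):
            os.makedirs(task_dir, exist_ok=True)
        else:
            subdirs = next(os.walk(task_dir))[1]
            numeric = [int(d) for d in subdirs if d.isdigit()]
            if numeric:
                max_num = max(numeric)
        if not ingest_continue:
            run_number = max_num + 1
        output_dir = os.path.join(task_dir, str(run_number))
        os.makedirs(output_dir, exist_ok=True)
        return output_dir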



variance-analysis/artifactdownloader/artifact_downloader.py [281:356]:
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
    head_rev = ""
    all_tasks = False
    if "all" in test_suites:
        all_tasks = True

    # For compatibility
    if type(artifact_to_get) not in (list,):
        artifact_to_get = [artifact_to_get]

    # Make the data directories
    task_dir = os.path.join(output_dir, task_group_id)

    run_number = 0
    max_num = 0
    if not os.path.exists(task_dir):
        os.makedirs(task_dir, exist_ok=True)
    else:
        # Get current run number
        curr_dir = os.getcwd()
        os.chdir(task_dir)
        dir_list = next(os.walk("."))[1]
        max_num = 0
        for subdir in dir_list:
            run_num = int(subdir)
            if run_num > max_num:
                max_num = run_num
        os.chdir(curr_dir)

    if not ingest_continue:
        run_number = max_num + 1

    output_dir = os.path.join(task_dir, str(run_number))
    os.makedirs(output_dir, exist_ok=True)

    log("Artifacts will be stored in %s" % output_dir)
    config_json_path = os.path.join(output_dir, "config.json")
    with open(config_json_path, "w") as f:
        json.dump(
            {
                "test_suites": test_suites,
                "platform": platform,
                "artifact": artifact_to_get,
                "download_failures": download_failures,
                "task_group_id": task_group_id,
            },
            f,
            indent=4,
        )

    log("Saved run configuration to %s" % config_json_path)

    task_ids = []
    log("Getting task group information...")
    tgi_path = os.path.join(task_dir, "task-group-information.json")
    if os.path.exists(tgi_path):
        with open(tgi_path, "r") as f:
            tasks = json.load(f)
    else:
        tasks = get_tasks_in_group(task_group_id)
        with open(tgi_path, "w") as f:
            json.dump(tasks, f, indent=4)
    log("Obtained")

    # Used to keep track of how many grcov files
    # we are downloading per test.
    task_counters = {}
    taskid_to_file_map = {}

    # For each task in this group
    threads = []
    TOTAL_TASKS = len(tasks)
    for task in tasks:
        download_this_task = False
        # Get the test name
        if platform not in task["task"]["metadata"]["name"]:
            continue
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
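
Both files also cache the task-group listing: the first run fetches it with get_tasks_in_group() and writes task-group-information.json into the task directory, so later runs against the same group reuse the file instead of querying again. A sketch of that caching step factored into its own function (hypothetical; neither file defines such a helper):

    import json
    import os

    def load_task_group(task_dir, task_group_id, get_tasks_in_group):
        """Fetch or reuse the cached task-group listing.

        get_tasks_in_group is the same helper the excerpts call; it is
        passed in only to keep this sketch self-contained.
        """
        tgi_path = os.path.join(task_dir, "task-group-information.json")
        if os.path.exists(tgi_path):
            # A previous run already fetched this group; reuse the cache.
            with open(tgi_path, "r") as f:
                return json.load(f)
        tasks = get_tasks_in_group(task_group_id)
        with open(tgi_path, "w") as f:
            json.dump(tasks, f, indent=4)
        return tasks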



