mozperftest_tools/mozperftest_tools/utils/artifact_downloader.py [522:570]:
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
        CURR_TASK += 1

    for t in threads:
        t.join()

    with open(os.path.join(output_dir, "taskid_to_file_map.json"), "w") as f:
        json.dump(taskid_to_file_map, f, indent=4)

    log("Finished processing.")
    log(
        "Stats: %s PASSED, %s FAILED, %s TOTAL"
        % (str(len(ALL_TASKS) - len(FAILED)), str(len(FAILED)), str(len(ALL_TASKS)))
    )
    if FAILED:
        log(
            "Tasks the failed to have their artifact downloaded: %s"
            % "\n\t".join(FAILED)
        )

    # Return the directory where all the tasks were downloaded to
    # and split into folders.
    return output_dir, head_rev


def main():
    parser = artifact_downloader_parser()
    args = parser.parse_args()

    task_group_id = args.task_group_id[0]
    test_suites = args.test_suites_list
    artifact_to_get = args.artifact_to_get
    unzip_artifact = args.unzip_artifact
    platform = args.platform
    download_failures = args.download_failures
    ingest_continue = args.ingest_continue
    output_dir = args.output[0] if args.output is not None else os.getcwd()

    task_dir, head_rev = artifact_downloader(
        task_group_id,
        output_dir=output_dir,
        test_suites=test_suites,
        artifact_to_get=artifact_to_get,
        unzip_artifact=unzip_artifact,
        platform=platform,
        download_failures=download_failures,
        ingest_continue=ingest_continue,
    )

    return task_dir
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -



variance-analysis/artifactdownloader/artifact_downloader.py [530:578]:
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
        CURR_TASK += 1

    for t in threads:
        t.join()

    with open(os.path.join(output_dir, "taskid_to_file_map.json"), "w") as f:
        json.dump(taskid_to_file_map, f, indent=4)

    log("Finished processing.")
    log(
        "Stats: %s PASSED, %s FAILED, %s TOTAL"
        % (str(len(ALL_TASKS) - len(FAILED)), str(len(FAILED)), str(len(ALL_TASKS)))
    )
    if FAILED:
        log(
            "Tasks the failed to have their artifact downloaded: %s"
            % "\n\t".join(FAILED)
        )

    # Return the directory where all the tasks were downloaded to
    # and split into folders.
    return output_dir, head_rev


def main():
    parser = artifact_downloader_parser()
    args = parser.parse_args()

    task_group_id = args.task_group_id[0]
    test_suites = args.test_suites_list
    artifact_to_get = args.artifact_to_get
    unzip_artifact = args.unzip_artifact
    platform = args.platform
    download_failures = args.download_failures
    ingest_continue = args.ingest_continue
    output_dir = args.output[0] if args.output is not None else os.getcwd()

    task_dir, head_rev = artifact_downloader(
        task_group_id,
        output_dir=output_dir,
        test_suites=test_suites,
        artifact_to_get=artifact_to_get,
        unzip_artifact=unzip_artifact,
        platform=platform,
        download_failures=download_failures,
        ingest_continue=ingest_continue,
    )

    return task_dir
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
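
Both excerpts expose the same main() entry point, so the module can be driven from the command line or called programmatically. Below is a minimal sketch of a programmatic call, assuming artifact_downloader.py is importable under that name; the keyword arguments mirror the attributes used in main() above, while the import path and every argument value (task group ID, suite names, artifact names, platform) are illustrative assumptions only.

# Minimal sketch of invoking artifact_downloader programmatically.
# The import path is hypothetical; the keyword names come from the
# excerpt above, but all values here are placeholders.
import os

from artifact_downloader import artifact_downloader  # hypothetical import path

task_dir, head_rev = artifact_downloader(
    "SOME_TASK_GROUP_ID",                  # Taskcluster task group to pull artifacts from
    output_dir=os.getcwd(),                # where per-task folders are created
    test_suites=["browsertime-tp6"],       # suites to filter on (illustrative)
    artifact_to_get=["perfherder-data"],   # artifact name(s) to download (illustrative)
    unzip_artifact=True,
    platform="test-linux1804-64-shippable",  # illustrative platform string
    download_failures=False,
    ingest_continue=False,
)
print("Artifacts written to %s for revision %s" % (task_dir, head_rev))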
