variance-analysis/artifactdownloader/artifact_downloader.py [507:559]:
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
            t = threading.Thread(
                target=get_artifacts,
                args=(
                    task_id,
                    downloads_dir,
                    data_dir,
                    unzip_artifact,
                    task_counters[test_name],
                    test_name,
                    artifact_to_get,
                    download_failures,
                    taskid_to_file_map,
                ),
            )
            t.daemon = True

            t.start()
            threads.append(t)

            while CURR_REQS >= MAX_REQUESTS:
                time.sleep(1)
                log("Waiting for requests to finish, currently at %s" % str(CURR_REQS))

        CURR_TASK += 1

    for t in threads:
        t.join()

    with open(os.path.join(output_dir, "taskid_to_file_map.json"), "w") as f:
        json.dump(taskid_to_file_map, f, indent=4)

    log("Finished processing.")
    log(
        "Stats: %s PASSED, %s FAILED, %s TOTAL"
        % (str(len(ALL_TASKS) - len(FAILED)), str(len(FAILED)), str(len(ALL_TASKS)))
    )
    if FAILED:
        log(
            "Tasks the failed to have their artifact downloaded: %s"
            % "\n\t".join(FAILED)
        )

    # Return the directory where all the tasks were downloaded to
    # and split into folders.
    return output_dir, head_rev


def main():
    parser = artifact_downloader_parser()
    args = parser.parse_args()

    task_group_id = args.task_group_id[0]
    test_suites = args.test_suites_list
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
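Both copies throttle concurrency the same way: spawn one daemon thread per task, then busy-wait in one-second sleeps until the global CURR_REQS counter drops back below MAX_REQUESTS. A minimal sketch of the same cap without the polling loop, using concurrent.futures; the get_artifacts stub, the task-ID list, and the MAX_REQUESTS value below are illustrative stand-ins, not taken from the source:

import time
from concurrent.futures import ThreadPoolExecutor, as_completed

MAX_REQUESTS = 5  # assumed cap, standing in for the source's global

def get_artifacts(task_id):
    # Stub for the real download worker; sleeps to simulate I/O.
    time.sleep(0.1)
    return task_id

task_ids = ["task-%d" % i for i in range(20)]  # illustrative IDs

# max_workers caps in-flight downloads, so no CURR_REQS counter or
# one-second polling loop is needed, and the explicit join() over a
# threads list becomes draining the futures.
with ThreadPoolExecutor(max_workers=MAX_REQUESTS) as pool:
    futures = {pool.submit(get_artifacts, tid): tid for tid in task_ids}
    for future in as_completed(futures):
        print("finished", futures[future])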



variance-analysis/mach_perftest_notebook_dev/perftestnotebook/artifact_downloader.py [411:462]:
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
            t = threading.Thread(
                target=get_artifacts,
                args=(
                    task_id,
                    downloads_dir,
                    data_dir,
                    unzip_artifact,
                    task_counters[test_name],
                    test_name,
                    artifact_to_get,
                    download_failures,
                    taskid_to_file_map,
                ),
            )
            t.daemon = True

            t.start()
            threads.append(t)

            while CURR_REQS >= MAX_REQUESTS:
                time.sleep(1)
                log("Waiting for requests to finish, currently at %s" % str(CURR_REQS))
        CURR_TASK += 1

    for t in threads:
        t.join()

    with open(os.path.join(output_dir, "taskid_to_file_map.json"), "w") as f:
        json.dump(taskid_to_file_map, f, indent=4)

    log("Finished processing.")
    log(
        "Stats: %s PASSED, %s FAILED, %s TOTAL"
        % (str(len(ALL_TASKS) - len(FAILED)), str(len(FAILED)), str(len(ALL_TASKS)))
    )
    if FAILED:
        log(
            "Tasks the failed to have their artifact downloaded: %s"
            % "\n\t".join(FAILED)
        )

    # Return the directory where all the tasks were downloaded to
    # and split into folders.
    return output_dir, head_rev


def main():
    parser = artifact_downloader_parser()
    args = parser.parse_args()

    task_group_id = args.task_group_id[0]
    test_suites = args.test_suites_list
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
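The two excerpts shown differ only by a single blank line, so the tail of this loop is a natural candidate for a shared helper. A minimal sketch of such a module, assuming a layout the repository does not currently have; the module name and function names are hypothetical:

# artifact_downloader_common.py -- hypothetical shared module
import json
import os


def write_taskid_map(output_dir, taskid_to_file_map):
    # Persist the task-id -> file mapping next to the artifacts,
    # replacing the identical with-open/json.dump block in both copies.
    path = os.path.join(output_dir, "taskid_to_file_map.json")
    with open(path, "w") as f:
        json.dump(taskid_to_file_map, f, indent=4)
    return path


def log_summary(log, all_tasks, failed):
    # Emit the PASSED/FAILED/TOTAL summary both copies currently inline.
    log("Finished processing.")
    log(
        "Stats: %d PASSED, %d FAILED, %d TOTAL"
        % (len(all_tasks) - len(failed), len(failed), len(all_tasks))
    )
    if failed:
        log(
            "Tasks that failed to have their artifact downloaded:\n\t%s"
            % "\n\t".join(failed)
        )

Each entry point would then import these and call write_taskid_map(output_dir, taskid_to_file_map) and log_summary(log, ALL_TASKS, FAILED) in place of the duplicated lines.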



