# parallel_download — extracted from download.py

def parallel_download(array, function, args=(), n_cores=None):
    """Apply *function* to every element of *array*, optionally in parallel.

    Each element ``x`` is invoked as ``function(x, *args)``. A tqdm progress
    bar advances as results complete.

    Parameters
    ----------
    array : sequence
        Items to process; must support ``len()`` (used to size the bar).
    function : callable
        Called as ``function(x, *args)``. Must be picklable when more than
        one core is used (it is shipped to worker processes).
    args : tuple, optional
        Extra positional arguments forwarded to *function*.
    n_cores : int or None, optional
        Number of worker processes. ``1`` runs serially in this process
        (no pool); ``None`` uses ``multiprocessing.cpu_count()``.

    Returns
    -------
    list
        Results in the same order as *array*.

    Raises
    ------
    Exception
        Any exception raised by *function* in a worker is re-raised here
        by ``AsyncResult.get()``.
    """
    if n_cores == 1:
        # Serial fast path: no pool overhead, tqdm streams over the items.
        return [function(x, *args) for x in tqdm(array)]
    # Resolve the worker count before entering the progress-bar context;
    # it has nothing to do with the bar itself.
    if n_cores is None:
        n_cores = multiprocessing.cpu_count()
    with tqdm(total=len(array)) as pbar:

        def _advance(_result):
            # Pool callback: receives the job's result, which we ignore —
            # we only tick the bar. (Previously named `update(*args)`,
            # which shadowed the outer `args` parameter.)
            pbar.update()

        with multiprocessing.Pool(processes=n_cores) as pool:
            jobs = [
                pool.apply_async(function, (x, *args), callback=_advance)
                for x in array
            ]
            # Collecting in submission order keeps results aligned with
            # `array`; .get() blocks and re-raises worker exceptions.
            results = [job.get() for job in jobs]
        return results