def download_data()

in download_data.py [0:0]


# Module-level imports used by this function.
import os
import subprocess

# download_file, raw_files, ARkitscense_url and VALIDATION are referenced
# below but are not shown in this excerpt.
def download_data(dataset,
                  video_ids,
                  splits,
                  download_dir,
                  keep_zip,
                  raw_dataset_assets
                  ):
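    """Download the requested videos from the given dataset.

    dataset: one of 'raw', '3dod' or 'upsampling'.
    video_ids: list of video ids to download.
    splits: list of split names, one per entry in video_ids.
    download_dir: root directory that all files are downloaded into.
    keep_zip: if False, zip archives are deleted after extraction.
    raw_dataset_assets: passed to raw_files to select which assets to fetch
        when dataset == 'raw'.
    """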
    download_dir = os.path.abspath(download_dir)
    # Deduplicate video ids; each id uses the split of its first occurrence
    # in video_ids.
    for video_id in set(video_ids):
        split = splits[video_ids.index(video_id)]
        dst_dir = os.path.join(download_dir, dataset, split)
        if dataset == 'raw':
            # Raw dataset: each requested asset is a separate file under a
            # per-video folder.
            dst_dir = os.path.join(dst_dir, str(video_id))
            url_prefix = f"{ARkitscense_url}/raw/{split}/{video_id}" + "/{}"
            file_names = raw_files(video_id, raw_dataset_assets)
        elif dataset == '3dod':
            # 3DOD and upsampling datasets: a single zip archive per video.
            url_prefix = f"{ARkitscense_url}/threedod/{split}" + "/{}"
            file_names = [f"{video_id}.zip"]
        elif dataset == 'upsampling':
            url_prefix = f"{ARkitscense_url}/upsampling/{split}" + "/{}"
            file_names = [f"{video_id}.zip"]
        else:
            raise Exception(f'No such dataset = {dataset}')
        os.makedirs(dst_dir, exist_ok=True)

        # Fetch each file for this video and unzip archives as they arrive.
        for file_name in file_names:
            dst_zip = os.path.join(dst_dir, file_name)
            url = url_prefix.format(file_name)
            download_file(url, file_name, dst_dir)

            # Extract the downloaded archive in place; avoid shell=True so
            # paths containing spaces are handled safely.
            if file_name.endswith('.zip'):
                subprocess.check_call(["unzip", dst_zip, "-d", dst_dir])
                if not keep_zip:
                    os.remove(dst_zip)

    # Download the dataset-level metadata file into the dataset folder.
    meta_file = "metadata.csv"
    url = f"{ARkitscense_url}/{dataset}/{meta_file}"
    dst_dir = os.path.join(download_dir, dataset)
    download_file(url, meta_file, dst_dir)

    # The upsampling validation split ships an extra attributes file.
    if dataset == 'upsampling' and VALIDATION in splits:
        val_attributes_file = "val_attributes.csv"
        url = f"{ARkitscense_url}/upsampling/{VALIDATION}/{val_attributes_file}"
        dst_dir = os.path.join(download_dir, dataset, VALIDATION)
        download_file(url, val_attributes_file, dst_dir)
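For context, the sketch below shows one way the function might be called from a driver script. It is illustrative only: the video ids, splits, and download directory are placeholder values, and download_file, raw_files, ARkitscense_url, and VALIDATION are helpers referenced by the function but not shown in this excerpt.

# Illustrative usage only; video ids and splits are placeholder values.
if __name__ == "__main__":
    download_data(
        dataset="3dod",                       # 'raw', '3dod' or 'upsampling'
        video_ids=["41048190", "41048191"],   # placeholder video ids
        splits=["Training", "Validation"],    # one split per video id
        download_dir="./data",
        keep_zip=False,                       # delete zip archives after unzipping
        raw_dataset_assets=None,              # only used for the 'raw' dataset
    )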