# unpackCIFAR10 — from datasets.py

def unpackCIFAR10(pathDB, pathOut):
    """Unpack the pickled CIFAR-10 training batches into per-class image folders.

    Args:
        pathDB: directory containing the extracted CIFAR-10 "python version"
            files ('data_batch_1'..'data_batch_5' and 'batches.meta').
        pathOut: output directory; one sub-directory per class label is
            created, and each image is saved there under its original name.

    Raises:
        FileNotFoundError: if any expected batch file is missing.
        ValueError: if a batch does not have the expected CIFAR-10 layout.
    """
    toLoad = ['data_batch_1', 'data_batch_2', 'data_batch_3', 'data_batch_4',
              'data_batch_5']

    # Check validity of the input directory before doing any work.
    for item in toLoad:
        filePath = os.path.join(pathDB, item)
        if not os.path.isfile(filePath):
            raise FileNotFoundError("Can't find " + filePath)

    # makedirs(exist_ok=True) also creates missing parents and avoids the
    # isdir-then-mkdir race that plain os.mkdir has.
    os.makedirs(pathOut, exist_ok=True)

    pathLabels = os.path.join(pathDB, 'batches.meta')
    with open(pathLabels, 'rb') as file:
        labels = pickle.load(file)['label_names']

    # One output folder per class label.
    for label in labels:
        os.makedirs(os.path.join(pathOut, label), exist_ok=True)

    nImagesPerBatch = 10000
    nImages = nImagesPerBatch * len(toLoad)
    status = 0

    print("Unpacking CIFAR-10...")
    for item in toLoad:
        pathItem = os.path.join(pathDB, item)
        with open(pathItem, 'rb') as file:
            # The batches were pickled under Python 2; encoding='bytes'
            # keeps the byte-string keys intact.
            batch = pickle.load(file, encoding='bytes')
        data = batch[b'data']
        dataLabel = batch[b'labels']
        dataNames = batch[b'filenames']

        # Validate with explicit raises instead of assert, which is
        # stripped when running under `python -O`.
        if len(dataLabel) != nImagesPerBatch:
            raise ValueError("Unexpected number of labels in " + pathItem)
        if data.shape[1] != 3072:
            raise ValueError("Unexpected image size in " + pathItem)

        for i in range(nImagesPerBatch):

            # Each row holds 3072 bytes: 1024 red, 1024 green, 1024 blue,
            # each a row-major 32x32 plane.
            rgbArray = np.zeros((32, 32, 3), 'uint8')
            rgbArray[:, :, 0] = data[i, :1024].reshape(32, 32)
            rgbArray[:, :, 1] = data[i, 1024:2048].reshape(32, 32)
            rgbArray[:, :, 2] = data[i, 2048:].reshape(32, 32)

            name = dataNames[i].decode("utf-8")
            label = labels[dataLabel[i]]

            path = os.path.join(pathOut, label, name)
            saveImage(path, rgbArray)

            printProgressBar(status, nImages)
            status += 1

    # Force the bar to 100% once every image has been written.
    printProgressBar(nImages, nImages)