def loadFromFileData()

in eval/ABX_src/abx_iterators.py [0:0]


    def loadFromFileData(self, files_data, seqList, feature_maker, normalize):
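        """
        Run feature_maker on every file of seqList, cut the output into
        per-phone segments according to files_data, and store everything in a
        single concatenated tensor (self.data) with per-segment metadata
        (self.features).

        Args:
            files_data (dict): fileID -> list of (phone_start, phone_end,
                context_id, phone_id, speaker_id) tuples.
            seqList (list): (fileID, file_path) pairs to process.
            feature_maker (callable): file_path -> (n_frames x feature_dim)
                torch tensor.
            normalize (bool): if True, apply normalize_with_singularity to
                the extracted features.
        """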

        # self.features[i]: index_start, size, context_id, phone_id, speaker_id
        self.features = []
        self.INDEX_CONTEXT = 2
        self.INDEX_PHONE = 3
        self.INDEX_SPEAKER = 4
        data = []    # per-segment feature slices, concatenated into self.data at the end

        totSize = 0  # running frame count = offset of the next segment in self.data

        print("Building the input features...")
        bar = progressbar.ProgressBar(maxval=len(seqList))
        bar.start()

        for index, vals in enumerate(seqList):

            fileID, file_path = vals
            bar.update(index)
            if fileID not in files_data:
                continue

            # feature_maker is expected to return a (n_frames x feature_dim) tensor
            features = feature_maker(file_path)
            if normalize:
                features = normalize_with_singularity(features)

            features = features.detach().cpu()

            phone_data = files_data[fileID]

            for phone_start, phone_end, context_id, phone_id, speaker_id in phone_data:

                # Convert phone boundaries (times) to feature-frame indices;
                # self.stepFeature is the feature frame rate (frames per unit
                # time of the phone annotations).
                index_start = max(
                    0, int(math.ceil(self.stepFeature * phone_start - 0.5)))
                index_end = min(features.size(0),
                                int(math.floor(self.stepFeature * phone_end - 0.5)))

                # Skip segments that fall outside the extracted features or are empty
                if index_start >= features.size(0) or index_end <= index_start:
                    continue

                loc_size = index_end - index_start  # segment length in frames
                self.features.append([totSize, loc_size, context_id,
                                      phone_id, speaker_id])
                data.append(features[index_start:index_end])
                totSize += loc_size

        bar.finish()
        print("...done")

        self.data = torch.cat(data, dim=0)    # (totSize x feature_dim)
        self.feature_dim = self.data.size(1)
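
For reference, here is a minimal sketch of the structures the method expects and of the time-to-frame rounding it applies. The names toy_seq_list, toy_files_data and step_feature, as well as the frame rate of 100 frames per second, are hypothetical stand-ins chosen for this example; only the rounding mirrors the code above (the method additionally clamps index_end to the number of extracted frames).

    import math

    # Hypothetical inputs shaped like the arguments of loadFromFileData:
    #   seqList    : list of (fileID, file_path) pairs
    #   files_data : fileID -> list of (phone_start, phone_end,
    #                                   context_id, phone_id, speaker_id)
    toy_seq_list = [("utt_0001", "/path/to/utt_0001.wav")]
    toy_files_data = {"utt_0001": [(0.48, 0.56, 7, 12, 3)]}

    step_feature = 100.0  # assumed frame rate; the real value is self.stepFeature

    for file_id, _path in toy_seq_list:
        for phone_start, phone_end, context_id, phone_id, speaker_id in toy_files_data[file_id]:
            index_start = max(0, int(math.ceil(step_feature * phone_start - 0.5)))
            index_end = int(math.floor(step_feature * phone_end - 0.5))
            print(index_start, index_end, index_end - index_start)
            # -> 48 55 7: the phone spanning [0.48, 0.56] maps to 7 feature frames

Once loading completes, segment i can be recovered as self.data[start:start + size], where start and size are the first two entries of self.features[i].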