LearningMachine.py [150:165]:
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
                if self.conf.mode == 'normal':
                    progress = tqdm(range(len(target_batches)))
                elif self.conf.mode == 'philly':
                    progress = range(len(target_batches))
                else:
                    # fall back to a plain range so that `progress` is always bound
                    progress = range(len(target_batches))
                for i in progress:
                    # result shape: [batch_size, num_classes] for classification; [batch_size, seq_len, num_tags] for sequence tagging
                    param_list, inputs_desc, length_desc = transform_params2tensors(data_batches[i], length_batches[i])
                    logits = self.model(inputs_desc, length_desc, *param_list)

                    logits_softmax = {}
                    if isinstance(self.model, nn.DataParallel):
                        for tmp_output_layer_id in self.model.module.output_layer_id:
                            if isinstance(self.model.module.layers[tmp_output_layer_id], Linear) and \
                                    (not self.model.module.layers[tmp_output_layer_id].layer_conf.last_hidden_softmax):
                                logits_softmax[tmp_output_layer_id] = nn.functional.softmax(
                                    logits[tmp_output_layer_id], dim=-1)
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
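A note on this excerpt and its near-duplicate below: both reach the output layers through self.model.module, which only exists while the model is wrapped in nn.DataParallel. A minimal sketch of a helper that makes the lookup uniform either way (the name unwrap_model is hypothetical, not from the repository):

import torch.nn as nn

def unwrap_model(model: nn.Module) -> nn.Module:
    # Return the underlying module whether or not `model` is wrapped in
    # nn.DataParallel, so callers need only one attribute-lookup path.
    return model.module if isinstance(model, nn.DataParallel) else model

With such a helper, the isinstance check and the .module indirection collapse into a single expression, e.g. layers = unwrap_model(self.model).layers.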



LearningMachine.py [442:458]:
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
            if self.conf.mode == 'normal':
                progress = tqdm(range(len(target_batches)))
            elif self.conf.mode == 'philly':
                progress = range(len(target_batches))
            else:
                # fall back to a plain range so that `progress` is always bound
                progress = range(len(target_batches))
            for i in progress:
                # batch_size_actual = target_batches[i].size(0)

                param_list, inputs_desc, length_desc = transform_params2tensors(data_batches[i], length_batches[i])
                logits = self.model(inputs_desc, length_desc, *param_list)

                logits_softmax = {}
                if isinstance(self.model, nn.DataParallel):
                    for tmp_output_layer_id in self.model.module.output_layer_id:
                        if isinstance(self.model.module.layers[tmp_output_layer_id], Linear) and \
                                (not self.model.module.layers[tmp_output_layer_id].layer_conf.last_hidden_softmax):
                            logits_softmax[tmp_output_layer_id] = nn.functional.softmax(
                                logits[tmp_output_layer_id], dim=-1)
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
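The two excerpts above ([150:165] and [442:458]) repeat the same forward-pass-plus-softmax logic line for line. A sketch of how the shared block could be factored into a single method on the class, assuming a hypothetical method name _compute_logits_softmax and that Linear is the project's own output-layer class already imported in LearningMachine.py:

import torch.nn as nn

def _compute_logits_softmax(self, logits):
    # Apply a final softmax to each Linear output layer that has not already
    # applied one, i.e. whose layer_conf.last_hidden_softmax flag is False.
    # Unwrap nn.DataParallel first, as in the unwrap_model sketch above.
    module = self.model.module if isinstance(self.model, nn.DataParallel) else self.model
    logits_softmax = {}
    for layer_id in module.output_layer_id:
        layer = module.layers[layer_id]
        if isinstance(layer, Linear) and not layer.layer_conf.last_hidden_softmax:
            logits_softmax[layer_id] = nn.functional.softmax(logits[layer_id], dim=-1)
    return logits_softmax

Each call site would then shrink to two lines:

    logits = self.model(inputs_desc, length_desc, *param_list)
    logits_softmax = self._compute_logits_softmax(logits)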



