LearningMachine.py [446:468]:
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
            for i in progress:
                # batch_size_actual = target_batches[i].size(0)

                param_list, inputs_desc, length_desc = transform_params2tensors(data_batches[i], length_batches[i])
                logits = self.model(inputs_desc, length_desc, *param_list)

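                # Softmax only the plain Linear output layers that have not
                # already applied softmax themselves (last_hidden_softmax);
                # pass all other outputs through unchanged.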
                logits_softmax = {}
                if isinstance(self.model, nn.DataParallel):
                    for tmp_output_layer_id in self.model.module.output_layer_id:
                        if isinstance(self.model.module.layers[tmp_output_layer_id], Linear) and \
                                (not self.model.module.layers[tmp_output_layer_id].layer_conf.last_hidden_softmax):
                            logits_softmax[tmp_output_layer_id] = nn.functional.softmax(
                                logits[tmp_output_layer_id], dim=-1)
                        else:
                            logits_softmax[tmp_output_layer_id] = logits[tmp_output_layer_id]
                else:
                    for tmp_output_layer_id in self.model.output_layer_id:
                        if isinstance(self.model.layers[tmp_output_layer_id], Linear) and \
                                (not self.model.layers[tmp_output_layer_id].layer_conf.last_hidden_softmax):
                            logits_softmax[tmp_output_layer_id] = nn.functional.softmax(
                                logits[tmp_output_layer_id], dim=-1)
                        else:
                            logits_softmax[tmp_output_layer_id] = logits[tmp_output_layer_id]
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
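Both excerpts repeat the same post-processing: unwrap nn.DataParallel if present, then softmax the logits of every plain Linear output layer that has not already applied softmax via its last_hidden_softmax flag, passing all other outputs through unchanged. A minimal sketch of a shared helper that both call sites could use; the function name softmax_output_logits and the block_zoo import path are assumptions, while Linear, output_layer_id, layers, and layer_conf.last_hidden_softmax all come from the excerpts themselves:

import torch.nn as nn

from block_zoo import Linear  # assumed import path for the project's Linear block


def softmax_output_logits(model, logits):
    """Softmax the logits of plain Linear output layers that skip
    last_hidden_softmax; pass all other output logits through unchanged."""
    # Unwrap nn.DataParallel so the two branches above collapse into one path.
    module = model.module if isinstance(model, nn.DataParallel) else model
    logits_softmax = {}
    for layer_id in module.output_layer_id:
        layer = module.layers[layer_id]
        if isinstance(layer, Linear) and not layer.layer_conf.last_hidden_softmax:
            logits_softmax[layer_id] = nn.functional.softmax(logits[layer_id], dim=-1)
        else:
            logits_softmax[layer_id] = logits[layer_id]
    return logits_softmax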



LearningMachine.py [693:714]:
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
                for i in progress:
                    # batch_size_actual = target_batches[i].size(0)
                    param_list, inputs_desc, length_desc = transform_params2tensors(data_batches[i], length_batches[i])
                    logits = self.model(inputs_desc, length_desc, *param_list)

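                    # Same post-processing as at [446:468]: softmax plain Linear
                    # outputs that skip last_hidden_softmax, pass the rest through.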
                    logits_softmax = {}
                    if isinstance(self.model, nn.DataParallel):
                        for tmp_output_layer_id in self.model.module.output_layer_id:
                            if isinstance(self.model.module.layers[tmp_output_layer_id], Linear) and \
                                    (not self.model.module.layers[tmp_output_layer_id].layer_conf.last_hidden_softmax):
                                logits_softmax[tmp_output_layer_id] = nn.functional.softmax(
                                    logits[tmp_output_layer_id], dim=-1)
                            else:
                                logits_softmax[tmp_output_layer_id] = logits[tmp_output_layer_id]
                    else:
                        for tmp_output_layer_id in self.model.output_layer_id:
                            if isinstance(self.model.layers[tmp_output_layer_id], Linear) and \
                                    (not self.model.layers[tmp_output_layer_id].layer_conf.last_hidden_softmax):
                                logits_softmax[tmp_output_layer_id] = nn.functional.softmax(
                                    logits[tmp_output_layer_id], dim=-1)
                            else:
                                logits_softmax[tmp_output_layer_id] = logits[tmp_output_layer_id]
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
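With a helper like the sketch above, each of the two duplicated loop bodies would reduce to (same assumptions as before):

    logits = self.model(inputs_desc, length_desc, *param_list)
    logits_softmax = softmax_output_logits(self.model, logits)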



