LearningMachine.py [450:468]:
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
                # Forward pass: the wrapped model returns a dict of raw logits keyed by
                # output-layer id (inferred from the indexing below — confirm against model).
                logits = self.model(inputs_desc, length_desc, *param_list)

                # Build a parallel dict of probabilities: apply softmax only to Linear
                # output layers that did NOT already softmax their last hidden output
                # (layer_conf.last_hidden_softmax is False); everything else passes through.
                logits_softmax = {}
                if isinstance(self.model, nn.DataParallel):
                    # DataParallel wraps the real model, so layers/output_layer_id
                    # must be reached through .module.
                    for tmp_output_layer_id in self.model.module.output_layer_id:
                        if isinstance(self.model.module.layers[tmp_output_layer_id], Linear) and \
                                (not self.model.module.layers[tmp_output_layer_id].layer_conf.last_hidden_softmax):
                            logits_softmax[tmp_output_layer_id] = nn.functional.softmax(
                                logits[tmp_output_layer_id], dim=-1)
                        else:
                            # Already softmaxed by the layer, or not a Linear head: keep as-is.
                            logits_softmax[tmp_output_layer_id] = logits[tmp_output_layer_id]
                else:
                    # Same logic without the DataParallel indirection.
                    # NOTE(review): this branch pair is duplicated verbatim at
                    # LearningMachine.py [807:825] — consider extracting a shared helper
                    # that takes the unwrapped model (model.module or model) as input.
                    for tmp_output_layer_id in self.model.output_layer_id:
                        if isinstance(self.model.layers[tmp_output_layer_id], Linear) and \
                                (not self.model.layers[tmp_output_layer_id].layer_conf.last_hidden_softmax):
                            logits_softmax[tmp_output_layer_id] = nn.functional.softmax(
                                logits[tmp_output_layer_id], dim=-1)
                        else:
                            logits_softmax[tmp_output_layer_id] = logits[tmp_output_layer_id]
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -



LearningMachine.py [807:825]:
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
            # Forward pass: the wrapped model returns a dict of raw logits keyed by
            # output-layer id (inferred from the indexing below — confirm against model).
            logits = self.model(inputs_desc, length_desc, *param_list)

            # Convert raw logits to probabilities per output layer: softmax is applied
            # only to Linear heads whose layer_conf.last_hidden_softmax is False
            # (i.e. the layer itself did not already apply softmax); otherwise the
            # logits are passed through unchanged.
            logits_softmax = {}
            if isinstance(self.model, nn.DataParallel):
                # DataParallel hides the real model behind .module, so all layer
                # metadata lookups go through self.model.module here.
                for tmp_output_layer_id in self.model.module.output_layer_id:
                    if isinstance(self.model.module.layers[tmp_output_layer_id], Linear) and \
                            (not self.model.module.layers[tmp_output_layer_id].layer_conf.last_hidden_softmax):
                        logits_softmax[tmp_output_layer_id] = nn.functional.softmax(
                            logits[tmp_output_layer_id], dim=-1)
                    else:
                        # Already normalized by the layer, or not a Linear head: pass through.
                        logits_softmax[tmp_output_layer_id] = logits[tmp_output_layer_id]
            else:
                # Same logic on the bare (non-DataParallel) model.
                # NOTE(review): identical code also appears at LearningMachine.py
                # [450:468] — a shared helper taking the unwrapped model would
                # remove this copy-paste duplication.
                for tmp_output_layer_id in self.model.output_layer_id:
                    if isinstance(self.model.layers[tmp_output_layer_id], Linear) and \
                            (not self.model.layers[tmp_output_layer_id].layer_conf.last_hidden_softmax):
                        logits_softmax[tmp_output_layer_id] = nn.functional.softmax(
                            logits[tmp_output_layer_id], dim=-1)
                    else:
                        logits_softmax[tmp_output_layer_id] = logits[tmp_output_layer_id]
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -



