LearningMachine.py [246:265]:
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
                    elif ProblemTypes[self.problem.problem_type] == ProblemTypes.mrc:
                        for key, value in logits.items():
                            logits[key] = value.squeeze()
                        for key, value in logits_softmax.items():
                            logits_softmax[key] = value.squeeze()
                        passage_identify = None
                        for type_key in data_batches[i].keys():
                            if 'p' in type_key.lower():
                                passage_identify = type_key
                                break
                        if not passage_identify:
                            raise Exception('MRC task needs passage information.')
                        prediction = self.problem.decode(logits_softmax, lengths=length_batches[i][passage_identify],
                                                         batch_data=data_batches[i][passage_identify])
                        logits_flat = logits
                        mrc_answer_target = None
                        for single_target in target_batches[i]:
                            if isinstance(target_batches[i][single_target][0], str):
                                mrc_answer_target = target_batches[i][single_target]
                        streaming_recoder.record_one_row([prediction, mrc_answer_target])
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -



LearningMachine.py [546:565]:
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
                elif ProblemTypes[self.problem.problem_type] == ProblemTypes.mrc:
                    for key, value in logits.items():
                        logits[key] = value.squeeze()
                    for key, value in logits_softmax.items():
                        logits_softmax[key] = value.squeeze()
                    passage_identify = None
                    for type_key in data_batches[i].keys():
                        if 'p' in type_key.lower():
                            passage_identify = type_key
                            break
                    if not passage_identify:
                        raise Exception('MRC task needs passage information.')
                    prediction = self.problem.decode(logits_softmax, lengths=length_batches[i][passage_identify],
                                                     batch_data=data_batches[i][passage_identify])
                    logits_flat = logits
                    mrc_answer_target = None
                    for single_target in target_batches[i]:
                        if isinstance(target_batches[i][single_target][0], str):
                            mrc_answer_target = target_batches[i][single_target]
                    streaming_recoder.record_one_row([prediction, mrc_answer_target])
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
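
Both excerpts run the same MRC post-processing: squeeze the raw and softmaxed logits, find the passage column, decode span predictions, and pick out the string-valued answer target. Below is a minimal sketch of how that shared logic could be pulled into a single helper; the function name decode_mrc_batch and its standalone signature are assumptions, while self.problem.decode, the batch/length/target dictionaries, and the 'p'-substring passage lookup are taken directly from the excerpts above.

def decode_mrc_batch(problem, logits, logits_softmax, data_batch, length_batch, target_batch):
    """Hypothetical helper consolidating the duplicated MRC branch above."""
    # Drop singleton dimensions from the raw and softmaxed logits in place.
    for key, value in logits.items():
        logits[key] = value.squeeze()
    for key, value in logits_softmax.items():
        logits_softmax[key] = value.squeeze()

    # Locate the passage input: the first column whose name contains 'p'.
    passage_identify = None
    for type_key in data_batch:
        if 'p' in type_key.lower():
            passage_identify = type_key
            break
    if not passage_identify:
        raise Exception('MRC task needs passage information.')

    # Decode answer spans against the passage tokens and their lengths.
    prediction = problem.decode(logits_softmax,
                                lengths=length_batch[passage_identify],
                                batch_data=data_batch[passage_identify])

    # The answer target is the column whose values are raw answer strings.
    mrc_answer_target = None
    for single_target in target_batch:
        if isinstance(target_batch[single_target][0], str):
            mrc_answer_target = target_batch[single_target]

    return prediction, mrc_answer_target

Under this sketch, both call sites would reduce to something like prediction, mrc_answer_target = decode_mrc_batch(self.problem, logits, logits_softmax, data_batches[i], length_batches[i], target_batches[i]), followed by the existing logits_flat = logits assignment and streaming_recoder.record_one_row([prediction, mrc_answer_target]).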



