def predict_and_access()

in 3_optimization-design-ptn/03_prompt-optimization/promptwizard/glue/promptopt/instantiate.py


    def predict_and_access(self, question: str, gt_answer: str) -> dict:
        """
        For the given input question, get an answer from the LLM using the BEST_PROMPT and
        EXPERT_PROFILE computed earlier.

        :param question: Question to be asked to the LLM.
        :param gt_answer: Ground-truth (final) answer for the question.
        :return: Dictionary keyed by EvalLiterals, with entries:
                IS_CORRECT -> Whether the LLM's prediction matched the ground truth.
                PREDICTED_ANS -> The answer extracted from the LLM output.
                LLM_OUTPUT -> Raw text generated by the LLM for the given question.
        :rtype: dict
        """
        # Build the evaluation prompt from the optimized instruction and the question.
        final_prompt = self.prompt_pool.eval_prompt.format(
            instruction=self.BEST_PROMPT, question=question
        )
        # Query the LLM, using the expert profile as the system prompt.
        llm_output = self.prompt_opt.chat_completion(
            user_prompt=final_prompt, system_prompt=self.EXPERT_PROFILE
        )

        # Extract the predicted answer and compare it against the ground truth.
        is_correct, predicted_ans = self.data_processor.access_answer(
            llm_output, gt_answer
        )
        return {
            self.EvalLiterals.IS_CORRECT: is_correct,
            self.EvalLiterals.PREDICTED_ANS: predicted_ans,
            self.EvalLiterals.LLM_OUTPUT: llm_output,
        }