def chat_completion()

in 3_optimization-design-ptn/03_prompt-optimization/promptwizard/glue/promptopt/techniques/critique_n_refine/core_logic.py


    def chat_completion(self, user_prompt: str, system_prompt: str = None):
        """
        Make a chat completion request to the OpenAI API.

        :param user_prompt: Text spoken by user in a conversation.
        :param system_prompt: Text spoken by system in a conversation.
        :return: Output of LLM
        """
        if not system_prompt:
            system_prompt = self.prompt_pool.system_prompt

        messages = [
            {"role": "system", "content": system_prompt},
            {"role": "user", "content": user_prompt},
        ]
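        # Delegate the actual call to the configured LLM backend through LLMMgr.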
        response = LLMMgr.chat_completion(messages)
        return response
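
A minimal, self-contained sketch of the same pattern, with the PromptWizard pieces stubbed out so it runs on its own. DEFAULT_SYSTEM_PROMPT and fake_llm below are illustrative stand-ins for self.prompt_pool.system_prompt and LLMMgr.chat_completion; they are assumptions made for this sketch, not part of the library.

    from typing import Optional

    # Stand-in for self.prompt_pool.system_prompt (illustrative assumption).
    DEFAULT_SYSTEM_PROMPT = "You are a helpful assistant."

    def fake_llm(messages: list) -> str:
        # Stand-in for LLMMgr.chat_completion: echoes the roles and contents it received.
        return " | ".join(f"{m['role']}: {m['content']}" for m in messages)

    def chat_completion(user_prompt: str, system_prompt: Optional[str] = None) -> str:
        # Same flow as the method above: fall back to the default system prompt,
        # build the two-message chat payload, and hand it to the LLM call.
        if not system_prompt:
            system_prompt = DEFAULT_SYSTEM_PROMPT
        messages = [
            {"role": "system", "content": system_prompt},
            {"role": "user", "content": user_prompt},
        ]
        return fake_llm(messages)

    print(chat_completion("Refine this instruction to be more specific."))

Calling chat_completion without a system_prompt exercises the fallback branch; passing one overrides the default, mirroring how the method behaves on a configured instance.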