def chat_completion()

in 3_optimization-design-ptn/03_prompt-optimization/promptwizard/glue/common/llm/llm_mgr.py [0:0]


    def chat_completion(messages: Dict):
        """Route a chat-completion request to the backend selected by MODEL_TYPE.

        Args:
            messages: Chat payload forwarded to the backend
                (presumably an OpenAI-style message structure — confirm with caller).

        Returns:
            The backend response from ``call_api`` for "AzureOpenAI",
            ``0`` for the not-yet-implemented "LLamaAML" path, or a
            fallback apology string if any error occurs (including an
            unrecognized MODEL_TYPE).
        """
        # Defaults to AzureOpenAI when MODEL_TYPE is unset.
        llm_handle = os.environ.get("MODEL_TYPE", "AzureOpenAI")
        try:
            if llm_handle == "AzureOpenAI":
                # Code for calling LLMs
                return call_api(messages)
            elif llm_handle == "LLamaAML":
                # Code for calling SLMs (placeholder — not implemented yet)
                return 0
            else:
                # Previously fell through and silently returned None for an
                # unknown handle; raise so the except path reports it instead.
                raise ValueError(f"Unsupported MODEL_TYPE: {llm_handle!r}")
        except Exception as e:
            print(e)
            return "Sorry, I am not able to understand your query. Please try again."