# check_deployment_status() — excerpted from
# assets/large_language_models/rag/components/src/validate_deployments.py


def check_deployment_status(model_params, model_type, activity_logger=None):
    """Check deployment status of the model deployment in AOAI.

    Attempt to use the deployment via a minimal data-plane call (a tiny
    completion or embedding request); if the deployment_name does not match
    what customer wanted, throw Exception.

    Args:
        model_params: Connection/model details. Keys read here include
            "openai_api_type", "openai_api_version", "openai_api_base",
            "openai_api_key", "deployment_id", "model_name" and
            "connection". An empty (or None) value skips validation.
        model_type: "llm" for completion models, "embedding" for embedding
            models; any other value performs no data-plane check.
        activity_logger: Telemetry logger. When None, a stdlib module
            logger is substituted so the logging calls below cannot fail.

    Returns:
        True when validation passes or is skipped; for the default AOAI
        connection, whatever validate_and_create_default_aoai_resource
        returns.

    Raises:
        Exception: "DeploymentValidationFailed..." when the deployment does
            not exist in the AOAI workspace; any other BadRequestError is
            re-raised unchanged.
    """
    # Nothing to validate; `not model_params` also tolerates None, which the
    # previous `model_params == {}` check would have let through to a
    # TypeError on the first subscript below.
    if not model_params:
        return True

    if activity_logger is None:
        # The original code dereferenced activity_logger unconditionally and
        # crashed with the declared default of None; fall back to a stdlib
        # logger (supports .info/.exception and the `extra` kwarg).
        import logging
        activity_logger = logging.getLogger(__name__)

    # Configure the module-global openai client from the connection params.
    openai.api_type = model_params["openai_api_type"]
    openai.api_version = model_params["openai_api_version"]
    # NOTE(review): assumes the AOAI endpoint string is a valid `base_url`
    # for the openai>=1.x client — confirm it carries any required path.
    openai.base_url = model_params["openai_api_base"]
    openai.api_key = model_params["openai_api_key"]

    if (model_params["openai_api_type"].lower() != "azure" or not model_params["deployment_id"]):
        # If OAI (not-azure), just pass through validation
        activity_logger.info(
            "[Validate Deployments]: Not an Azure Open AI resource - pass through validation.",
            extra={
                'properties': {
                    'openai_api_type': model_params["openai_api_type"].lower(),
                    'deployment_id': model_params["deployment_id"]}})
        return True

    ws, _ = get_workspace_and_run()

    if ("default_aoai_name" in model_params and
            split_details(model_params["connection"], start=1)["connections"] == "Default_AzureOpenAI"):
        # Special control plane validation for default AOAI connection
        activity_logger.info(
            "[Validate Deployments]: Default AOAI resource detected. Performing control plane validations now...",
            extra={
                'properties': {
                    'model_type': model_type,
                    'model_name': model_params["model_name"],
                    'deployment_name': model_params["deployment_id"]}})
        return validate_and_create_default_aoai_resource(ws, model_params, activity_logger)

    activity_logger.info(
        "[Validate Deployments]: Non-default AOAI resource detected. Performing data plane validations now...",
        extra={
            'properties': {
                'model_type': model_type,
                'model_name': model_params["model_name"],
                'deployment_name': model_params["deployment_id"]}})
    # Data plane validation for non-default AOAI resource
    if model_type == "llm":
        from langchain.llms import AzureOpenAI
        from langchain.chat_models import AzureChatOpenAI
        from langchain.chains import LLMChain
        from langchain.prompts import PromptTemplate
        if "gpt-" in model_params["model_name"]:
            # Chat-style (gpt-*) models go through the chat client and need
            # the deployment passed as the `engine` model kwarg as well.
            model_kwargs = {
                "engine": model_params["deployment_id"],
                "frequency_penalty": 0,
                "presence_penalty": 0
            }
            llm = AzureChatOpenAI(
                deployment_name=model_params["deployment_id"],
                model_name=model_params["model_name"],
                model_kwargs=model_kwargs,
                openai_api_key=model_params["openai_api_key"],
                openai_api_base=model_params["openai_api_base"],
                openai_api_version=model_params["openai_api_version"],
                openai_api_type=model_params["openai_api_type"])
        else:
            llm = AzureOpenAI(
                deployment_name=model_params["deployment_id"],
                model_name=model_params["model_name"],
                openai_api_key=model_params["openai_api_key"])
        try:
            # Issue a trivial completion; the except branch distinguishes
            # "deployment missing" from all other failures.
            template = "Answer the following question" + \
                "\n\nContext:\n{context}\n\nQuestion: {question}\n\n Answer:"
            prompt = PromptTemplate(template=template, input_variables=[
                                    "context", "question"])
            llm_chain = LLMChain(prompt=prompt, llm=llm)
            llm_chain.run({
                'context': "Say Yes if you received the question",
                'question': "Did you receive the question?"
            })
        except BadRequestError as ex:
            activity_logger.info("ValidationFailed: completion model deployment validation failed due to the "
                                 + "following exception: {}.".format(traceback.format_exc()))
            activity_logger.exception(
                "ValidationFailed with exception: completion model deployment validation failed due to the "
                + "following exception: {}.".format(traceback.format_exc()))
            if ("Resource not found" in str(ex) or
                    "The API deployment for this resource does not exist" in str(ex)):
                raise Exception(
                    "DeploymentValidationFailed: please submit a model deployment which exists "
                    + "in your AOAI workspace.")
            else:
                raise
    elif model_type == "embedding":
        from langchain.embeddings import AzureOpenAIEmbeddings, OpenAIEmbeddings
        if (model_params["openai_api_type"].lower() == "azure"):
            embeddings = AzureOpenAIEmbeddings(
                model=model_params["model_name"],
                deployment=model_params["deployment_id"],
                azure_endpoint=model_params["openai_api_base"],
                openai_api_type="azure",
                api_key=model_params["openai_api_key"])
        else:
            embeddings = OpenAIEmbeddings(
                model=model_params["model_name"],
                deployment=model_params["deployment_id"],
                openai_api_key=model_params["openai_api_key"],
                openai_api_type="open_ai")
        try:
            # Issue a trivial embedding request to prove the deployment exists.
            embeddings.embed_query(
                "Embed this query to test if deployment exists")
        except BadRequestError as ex:
            activity_logger.info(
                "ValidationFailed: embeddings deployment validation failed due to the following exception: {}."
                .format(traceback.format_exc()))
            if ("Resource not found" in str(ex) or
                    "The API deployment for this resource does not exist" in str(ex)):
                raise Exception(
                    "DeploymentValidationFailed: please submit an embedding deployment which exists "
                    + "in your AOAI workspace.")
            else:
                raise
    return True