in 3_optimization-design-ptn/03_prompt-optimization/promptwizard/glue/common/llm/llm_mgr.py [0:0]
def call_api(messages):
    # Lazy imports so the OpenAI / Azure SDKs are only loaded when the API is actually called.
    import os  # required for the os.environ lookups below
    from openai import OpenAI, AzureOpenAI
    from azure.identity import get_bearer_token_provider, AzureCliCredential

    if os.environ["USE_OPENAI_API_KEY"] == "True":
        # Plain OpenAI endpoint, authenticated with an API key.
        client = OpenAI(api_key=os.environ["OPENAI_API_KEY"])
        response = client.chat.completions.create(
            model=os.environ["OPENAI_MODEL_NAME"],
            messages=messages,
            temperature=0.0,
        )
    else:
        print("=== Using Azure OpenAI ===")
        # Azure AD token provider; only used if the azure_ad_token_provider
        # argument below is uncommented in place of api_key authentication.
        token_provider = get_bearer_token_provider(
            AzureCliCredential(), "https://cognitiveservices.azure.com/.default"
        )
        client = AzureOpenAI(
            api_version=os.environ["OPENAI_API_VERSION"],
            azure_endpoint=os.environ["AZURE_OPENAI_ENDPOINT"],
            api_key=os.environ["AZURE_OPENAI_API_KEY"],
            # azure_ad_token_provider=token_provider,
        )
        response = client.chat.completions.create(
            model=os.environ["AZURE_OPENAI_DEPLOYMENT_NAME"],
            messages=messages,
            temperature=0.0,
        )

    # Single deterministic completion (temperature=0.0); return the model's reply text.
    prediction = response.choices[0].message.content
    return prediction
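
A minimal usage sketch, not part of the original file: it assumes the OpenAI-API-key path and uses placeholder environment values and model name; messages follow the standard OpenAI chat format.

import os

# Hypothetical example values; replace with real credentials before running.
os.environ["USE_OPENAI_API_KEY"] = "True"
os.environ["OPENAI_API_KEY"] = "<your-api-key>"
os.environ["OPENAI_MODEL_NAME"] = "gpt-4o"

reply = call_api(
    [
        {"role": "system", "content": "You are a helpful assistant."},
        {"role": "user", "content": "Summarize prompt optimization in one sentence."},
    ]
)
print(reply)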