in pipeline/pipelineUtils/azure_openai.py [0:0]
import logging

from azure.identity import get_bearer_token_provider
from openai import AzureOpenAI

# config, OPENAI_API_VERSION, OPENAI_API_BASE and OPENAI_MODEL are assumed to be
# defined elsewhere in this module (e.g. loaded from the pipeline configuration).

def run_prompt(system_prompt, user_prompt):
    # Use a bearer-token provider so the Azure AD token is refreshed automatically,
    # rather than fetching a one-off static token that can expire mid-run.
    token_provider = get_bearer_token_provider(
        config.credential,
        "https://cognitiveservices.azure.com/.default",
    )
    openai_client = AzureOpenAI(
        azure_ad_token_provider=token_provider,
        api_version=OPENAI_API_VERSION,
        azure_endpoint=OPENAI_API_BASE,
    )
    logging.info(f"User Prompt: {user_prompt}")
    logging.info(f"System Prompt: {system_prompt}")

    try:
        response = openai_client.chat.completions.create(
            model=OPENAI_MODEL,
            messages=[
                {"role": "system", "content": system_prompt},
                {"role": "user", "content": user_prompt},
            ],
        )
    except Exception as e:
        logging.error(f"Error calling OpenAI API: {e}")
        return None

    return response.choices[0].message.content
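
# Illustrative usage sketch: the prompts and the __main__ guard below are
# assumptions for demonstration, not part of this module.
if __name__ == "__main__":
    answer = run_prompt(
        system_prompt="You are a helpful assistant.",
        user_prompt="Summarize the pipeline in one sentence.",
    )
    # run_prompt returns None when the OpenAI call fails, so guard before printing.
    if answer is not None:
        print(answer)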