in src/co_op_translator/config/llm_config/config.py [0:0]
def get_available_provider(cls) -> LLMProvider:
    """
    1) Attempt Azure OpenAI. If it fails:
       - If the error message contains "NO_CONFIG_AZURE", ignore it (Azure is not configured at all).
       - Otherwise, keep the error (an incomplete configuration) and raise it later,
         unless OpenAI turns out to be usable.
    2) Attempt OpenAI the same way.
    3) If both providers report "no config", raise "No LLM service is properly configured".
    """
    azure_error = None
    try:
        cls.get_service_config(LLMProvider.AZURE_OPENAI)
        return LLMProvider.AZURE_OPENAI
    except ValueError as e:
        if "NO_CONFIG_AZURE" in str(e):
            azure_error = None  # Means Azure is not configured at all
        else:
            azure_error = e  # Incomplete or other error

    openai_error = None
    try:
        cls.get_service_config(LLMProvider.OPENAI)
        return LLMProvider.OPENAI
    except ValueError as e:
        if "NO_CONFIG_OPENAI" in str(e):
            openai_error = None  # Means OpenAI is not configured at all
        else:
            openai_error = e  # Incomplete or other error

    # If azure_error and openai_error are both None => neither configured at all
    if not azure_error and not openai_error:
        raise ValueError("No LLM service is properly configured")

    # Otherwise, raise the first "incomplete" error if it exists
    if azure_error:
        raise azure_error
    if openai_error:
        raise openai_error

    # Fallback if something unexpected happened
    raise ValueError("No LLM service is properly configured")
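For context, the sketch below shows one way get_service_config could raise the two kinds of ValueError this selector distinguishes: a "NO_CONFIG_*" sentinel when a provider is not set up at all, and a plain message when the setup is only partial. The host class name LLMConfig, the enum values, and the environment-variable names are assumptions for illustration, not necessarily what the repository actually uses.

import os
from enum import Enum


class LLMProvider(str, Enum):
    # Assumed enum values; the real project may define these differently.
    AZURE_OPENAI = "azure_openai"
    OPENAI = "openai"


class LLMConfig:
    """Hypothetical host class for the method shown above."""

    @classmethod
    def get_service_config(cls, provider: LLMProvider) -> dict:
        # Hypothetical environment-variable names, for illustration only.
        if provider is LLMProvider.AZURE_OPENAI:
            endpoint = os.getenv("AZURE_OPENAI_ENDPOINT")
            key = os.getenv("AZURE_OPENAI_API_KEY")
            if not endpoint and not key:
                # Sentinel the selector treats as "Azure not configured at all".
                raise ValueError("NO_CONFIG_AZURE: Azure OpenAI is not configured")
            if not (endpoint and key):
                # Partial setup: surfaces to the caller as an incomplete-config error.
                raise ValueError("Azure OpenAI configuration is incomplete")
            return {"endpoint": endpoint, "key": key}

        api_key = os.getenv("OPENAI_API_KEY")
        if not api_key:
            # Sentinel the selector treats as "OpenAI not configured at all".
            raise ValueError("NO_CONFIG_OPENAI: OpenAI is not configured")
        return {"api_key": api_key}


if __name__ == "__main__":
    # Mimics the selection order used by get_available_provider():
    # Azure first, then OpenAI, distinguishing "not configured" from "misconfigured".
    for provider in (LLMProvider.AZURE_OPENAI, LLMProvider.OPENAI):
        try:
            print(provider.value, LLMConfig.get_service_config(provider))
            break
        except ValueError as exc:
            print(f"{provider.value}: {exc}")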