in src/co_op_translator/config/llm_config/config.py [0:0]
def get_service_config(cls, provider: LLMProvider) -> LLMServiceConfig:
    """
    Assemble the environment variables for *provider*, validate them, and
    wrap them in an LLMServiceConfig.

    Args:
        provider: The LLM provider to build a configuration for.

    Returns:
        LLMServiceConfig: The validated env-var mapping. Azure OpenAI is
        marked required; OpenAI is marked optional.

    Raises:
        ValueError: If *provider* is not a recognized LLMProvider member.
    """
    if provider == LLMProvider.AZURE_OPENAI:
        azure_config = AzureOpenAIConfig()
        # Azure needs the full endpoint/deployment/version set.
        env_vars = {
            "AZURE_OPENAI_API_KEY": azure_config.get_api_key(),
            "AZURE_OPENAI_ENDPOINT": azure_config.get_endpoint(),
            "AZURE_OPENAI_MODEL_NAME": azure_config.get_model_name(),
            "AZURE_OPENAI_CHAT_DEPLOYMENT_NAME": azure_config.get_chat_deployment_name(),
            "AZURE_OPENAI_API_VERSION": azure_config.get_api_version(),
        }
        required = True
    elif provider == LLMProvider.OPENAI:
        openai_config = OpenAIConfig()
        env_vars = {
            "OPENAI_API_KEY": openai_config.get_api_key(),
            "OPENAI_ORG_ID": openai_config.get_org_id(),
            "OPENAI_CHAT_MODEL_ID": openai_config.get_chat_model_id(),
        }
        required = False
    else:
        raise ValueError(
            f"Unknown LLM provider: {provider}. Expected one of: {[e.name for e in LLMProvider]}"
        )

    # Shared tail: both known providers validate and wrap identically,
    # differing only in the `required` flag set above.
    cls.validate_env_vars(env_vars, provider)
    return LLMServiceConfig(required=required, env_vars=env_vars)