in src/co_op_translator/core/llm/markdown_translator.py [0:0]
    @classmethod
    def create(cls, root_dir: Path = None) -> "MarkdownTranslator":
        """Factory method to create the appropriate markdown translator for the available provider.

        Args:
            root_dir: Optional root directory for the project.

        Returns:
            MarkdownTranslator: An instance of the appropriate markdown translator.

        Raises:
            ValueError: If no valid LLM provider is configured or the provider is unsupported.
        """
        provider = LLMConfig.get_available_provider()
        if provider is None:
            raise ValueError("No valid LLM provider configured")

        if provider == LLMProvider.AZURE_OPENAI:
            # Imported lazily so only the configured provider's dependencies are loaded.
            from co_op_translator.core.llm.providers.azure.markdown_translator import (
                AzureMarkdownTranslator,
            )

            return AzureMarkdownTranslator(root_dir)
        elif provider == LLMProvider.OPENAI:
            from co_op_translator.core.llm.providers.openai.markdown_translator import (
                OpenAIMarkdownTranslator,
            )

            return OpenAIMarkdownTranslator(root_dir)
        else:
            raise ValueError(f"Unsupported provider: {provider}")
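
A minimal usage sketch of the factory. It assumes the module path matches the file shown above and that provider credentials (Azure OpenAI or OpenAI) are already configured in the environment that LLMConfig reads; the variable names here are illustrative only.

from pathlib import Path

from co_op_translator.core.llm.markdown_translator import MarkdownTranslator

try:
    # Returns AzureMarkdownTranslator or OpenAIMarkdownTranslator depending on
    # which provider LLMConfig.get_available_provider() reports as configured.
    translator = MarkdownTranslator.create(root_dir=Path("."))
except ValueError as exc:
    # Raised when no valid LLM provider is configured or the provider is unsupported.
    print(f"Cannot create a markdown translator: {exc}")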