in backend.py
import bpy

# Assumed import: LiteLLMModel here matches the smolagents wrapper around
# LiteLLM; adjust if the add-on vendors its own model class.
from smolagents import LiteLLMModel


def _load_litellm_model(self):
    """Build a LiteLLMModel from the add-on preferences and smoke-test it."""
    prefs = bpy.context.preferences.addons[__package__].preferences
    kwargs = {}
    if prefs.llm_provider == "ollama":
        # The "ollama_chat/" prefix routes the request to Ollama's chat API.
        model_id = f"ollama_chat/{prefs.ollama_model_name}"
        api_base = prefs.ollama_endpoint
        api_key = prefs.ollama_api_key or None
        # num_ctx is an Ollama-specific option that sets the context window.
        kwargs["num_ctx"] = prefs.context_length
    elif prefs.llm_provider == "anthropic":
        model_id = f"anthropic/{prefs.anthropic_model_id}"
        api_base = None
        api_key = prefs.anthropic_api_key
    elif prefs.llm_provider == "openai":
        # OpenAI is LiteLLM's default provider, so the model id needs no prefix.
        model_id = prefs.openai_model_id
        api_base = None
        api_key = prefs.openai_api_key
    else:
        raise ValueError(f"Unknown provider: {prefs.llm_provider}")

    self.model = LiteLLMModel(
        model_id=model_id,
        api_base=api_base,
        api_key=api_key,
        **kwargs,
    )

    # Send a trivial prompt so misconfiguration (bad key, unreachable
    # endpoint, unknown model name) fails fast here rather than mid-session.
    try:
        input_messages = [
            {
                "role": "user",
                "content": [
                    {
                        "type": "text",
                        "text": "Hello, world!",
                    }
                ],
            }
        ]
        self.model(input_messages)
    except Exception:
        # Drop the half-configured model, then re-raise with the original
        # traceback (bare `raise` instead of `raise e`).
        self.model = None
        raise
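
For context, a minimal sketch of the AddonPreferences class this loader reads. The property names (llm_provider, ollama_model_name, ollama_endpoint, ollama_api_key, context_length, anthropic_model_id, anthropic_api_key, openai_model_id, openai_api_key) are taken from the function above; the class name, enum items, and defaults are illustrative assumptions, not the add-on's actual definitions.

# Hypothetical preferences sketch; property names mirror _load_litellm_model,
# everything else (class name, enum items, defaults) is assumed.
# Would be registered in the add-on's register() via bpy.utils.register_class.
import bpy


class LLMBackendPreferences(bpy.types.AddonPreferences):
    bl_idname = __package__

    llm_provider: bpy.props.EnumProperty(
        name="Provider",
        items=[
            ("ollama", "Ollama", "Local Ollama server"),
            ("anthropic", "Anthropic", "Anthropic hosted API"),
            ("openai", "OpenAI", "OpenAI hosted API"),
        ],
        default="ollama",
    )

    # Ollama
    ollama_model_name: bpy.props.StringProperty(name="Model", default="llama3.1")
    ollama_endpoint: bpy.props.StringProperty(
        name="Endpoint", default="http://localhost:11434"
    )
    ollama_api_key: bpy.props.StringProperty(name="API Key", subtype="PASSWORD")
    context_length: bpy.props.IntProperty(name="Context Length", default=8192)

    # Anthropic
    anthropic_model_id: bpy.props.StringProperty(name="Model ID")
    anthropic_api_key: bpy.props.StringProperty(name="API Key", subtype="PASSWORD")

    # OpenAI
    openai_model_id: bpy.props.StringProperty(name="Model ID")
    openai_api_key: bpy.props.StringProperty(name="API Key", subtype="PASSWORD")

Because the loader raises on any failure and resets self.model to None, a call site (for example an operator's execute()) can wrap it in try/except and report the error to the user without leaving a half-initialized model behind.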