# preferences.py — add-on preferences UI (excerpt)
def draw(self, context):
    """Draw the add-on preferences panel.

    Layout, top to bottom:
      1. Backend selector (local vs. remote).
      2. Backend-specific settings: local model management, or remote
         provider instructions + credential fields.
      3. Collapsible generation settings (temperature, context length).
      4. Collapsible integrations (LLaMA-Mesh, Hyper3D).

    :param context: Blender context (unused directly; required by the
        ``AddonPreferences.draw`` interface).
    """
    layout = self.layout
    self._draw_backend(layout)
    if self.backend_type == "LOCAL":
        self._draw_local_models(layout)
    else:
        self._draw_remote_provider(layout)
    self._draw_generation_settings(layout)
    self._draw_integrations(layout)

def _draw_backend(self, layout):
    """Backend selector, plus hardware caveats when LOCAL is active."""
    backend_box = layout.box()
    backend_box.label(text="Backend", icon="SETTINGS")
    backend_box.prop(self, "backend_type", expand=True)
    if self.backend_type == "LOCAL":
        info_box = backend_box.box()
        info_box.label(text="Run models directly in Blender.", icon="INFO")
        col = info_box.column(align=True)
        col.label(text="Requires at least 8GB VRAM.")
        col.label(text="Small local models may struggle with agentic behaviors.")

def _draw_local_models(self, layout):
    """Downloaded-model picker, recommended-model download, progress bar."""
    local_box = layout.box()
    header = local_box.row(align=True)
    header.label(text="Downloaded models", icon="PACKAGE")
    header.operator("meshgen.open_models_folder", text="", icon="FILE_FOLDER")
    if get_available_models():
        local_box.prop(self, "current_model")
    elif not self.downloading:
        # Nothing on disk and no download in flight: offer the recommended
        # model, wired to the repo/filename configured on the preferences.
        local_box.label(text="No models downloaded.", icon="INFO")
        op = local_box.operator(
            "meshgen.download_model",
            text="Download Recommended Model",
            icon="IMPORT",
        )
        op.repo_id = self.download_repo_id
        op.filename = self.download_filename
    if self.downloading:
        row = local_box.row(align=True)
        row.label(text="Downloading...")
        row.prop(self, "download_progress", slider=True, text="")

def _draw_remote_provider(self, layout):
    """Provider dropdown plus per-provider setup steps and credential props.

    The table maps each provider id to (info label, numbered setup steps,
    property names to expose) so all providers render identically.
    """
    provider_ui = {
        "ollama": (
            "Run models with a local Ollama server.",
            (
                "1. Download and install Ollama from ollama.com",
                "2. Run `ollama serve` in the terminal",
            ),
            ("ollama_endpoint", "ollama_model_name", "ollama_api_key"),
        ),
        "huggingface": (
            "Run models with the Hugging Face API.",
            (
                "1. Create an account on hf.co",
                "2. Go to hf.co/settings/tokens and create a new token",
                "3. Paste the token into the API key field",
            ),
            ("huggingface_model_id", "huggingface_api_key"),
        ),
        "anthropic": (
            "Run models with the Anthropic API.",
            (
                "1. Create an account on console.anthropic.com",
                "2. Go to console.anthropic.com/settings/keys and create a key",
                "3. Paste the key into the API key field",
            ),
            ("anthropic_model_id", "anthropic_api_key"),
        ),
        "openai": (
            "Run models with the OpenAI API.",
            (
                "1. Create an account on platform.openai.com",
                "2. Go to platform.openai.com/api-keys and create a new secret key",
                "3. Paste the secret key into the API key field",
            ),
            ("openai_model_id", "openai_api_key"),
        ),
    }
    remote_box = layout.box()
    remote_box.prop(self, "llm_provider")
    remote_box.separator()
    entry = provider_ui.get(self.llm_provider)
    if entry is not None:
        info_text, steps, prop_names = entry
        info_box = remote_box.box()
        info_box.label(text=info_text, icon="INFO")
        col = info_box.column(align=True)
        for step in steps:
            col.label(text=step)
        remote_box.separator()
        for prop_name in prop_names:
            remote_box.prop(self, prop_name)

def _draw_generation_settings(self, layout):
    """Collapsible box with sampling/context sliders."""
    options_box = layout.box()
    header = options_box.row(align=True)
    # Disclosure triangle: the boolean property doubles as the open state.
    header.prop(
        self,
        "show_generation_settings",
        icon="TRIA_DOWN" if self.show_generation_settings else "TRIA_RIGHT",
        icon_only=True,
        emboss=False,
    )
    header.label(text="Generation Settings")
    if self.show_generation_settings:
        options_box.prop(self, "temperature", slider=True)
        options_box.prop(self, "context_length", slider=True)

def _draw_integrations(self, layout):
    """Collapsible box for optional integrations (LLaMA-Mesh, Hyper3D)."""
    plugin_box = layout.box()
    header = plugin_box.row(align=True)
    header.prop(
        self,
        "show_integrations_settings",
        icon="TRIA_DOWN" if self.show_integrations_settings else "TRIA_RIGHT",
        icon_only=True,
        emboss=False,
    )
    header.label(text="Integrations")
    if self.show_integrations_settings:
        # --- LLaMA-Mesh: load/unload toggle driven by manager state ---
        llama_mesh_box = plugin_box.box()
        llama_mesh_box.label(text="LLaMA-Mesh", icon="PACKAGE")
        llama_mesh_box.label(
            text="Use LLaMA-Mesh for local mesh generation and understanding.",
        )
        if LlamaMeshModelManager.instance().is_loaded:
            llama_mesh_box.label(text="LLaMA-Mesh is loaded", icon="CHECKBOX_HLT")
            llama_mesh_box.operator(
                "meshgen.unload_llama_mesh", text="Unload LLaMA-Mesh", icon="X"
            )
        else:
            if self.backend_type == "LOCAL":
                # Warn before letting the user stack two local models in VRAM.
                llama_mesh_box.label(
                    text="Requires 5GB additional VRAM. Not recommended with local backend.",
                    icon="ERROR",
                )
            llama_mesh_box.operator(
                "meshgen.load_llama_mesh", text="Load LLaMA-Mesh", icon="IMPORT"
            )
        # --- Hyper3D (Rodin): remote API, key shown only when enabled ---
        hyper3d_box = plugin_box.box()
        hyper3d_box.label(text="Hyper3D", icon="PACKAGE")
        hyper3d_box.label(
            text="Use Hyper3D (Rodin) API for mesh generation.",
        )
        hyper3d_box.prop(self, "enable_hyper3d")
        if self.enable_hyper3d:
            hyper3d_box.prop(self, "hyper3d_api_key")