in threestudio/models/prompt_processors/base.py [0:0]
def prepare_text_embeddings(self):
    os.makedirs(self._cache_dir, exist_ok=True)

    all_prompts = (
        [self.prompt]
        + [self.negative_prompt]
        + self.prompts_vd
        + self.negative_prompts_vd
    )
    prompts_to_process = []
    for prompt in all_prompts:
        if self.cfg.use_cache:
            # skip prompts whose text embeddings are already cached
            cache_path = os.path.join(
                self._cache_dir,
                f"{hash_prompt(self.cfg.pretrained_model_name_or_path, prompt)}.pt",
            )
            if os.path.exists(cache_path):
                threestudio.debug(
                    f"Text embeddings for model {self.cfg.pretrained_model_name_or_path} and prompt [{prompt}] are already in cache, skip processing."
                )
                continue
        prompts_to_process.append(prompt)

    if len(prompts_to_process) > 0:
        if self.cfg.spawn:
            # run the text encoder in a separate "spawn" process so that its
            # GPU memory is fully released once the embeddings are cached
            ctx = mp.get_context("spawn")
            subprocess = ctx.Process(
                target=self.spawn_func,
                args=(
                    self.cfg.pretrained_model_name_or_path,
                    prompts_to_process,
                    self._cache_dir,
                    self.device,
                ),
            )
            subprocess.start()
            subprocess.join()
        else:
            self.spawn_func(
                self.cfg.pretrained_model_name_or_path,
                prompts_to_process,
                self._cache_dir,
                self.device,
            )
        cleanup()
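
The embedding work itself is delegated to `spawn_func`, which concrete prompt processors provide. Below is a minimal, hypothetical sketch of such a `spawn_func` matching the call signature used above, assuming a Hugging Face-style Stable Diffusion checkpoint with `tokenizer` and `text_encoder` subfolders; the `hash_prompt` helper shown here (md5 over model name plus prompt) and the specific tokenizer/encoder classes are illustrative assumptions, not the actual threestudio implementation.

# --- Illustrative sketch (not part of base.py) ---------------------------
import hashlib
import os

import torch
from transformers import AutoTokenizer, CLIPTextModel


def hash_prompt(model: str, prompt: str) -> str:
    # assumed cache-key scheme: md5 over "<model>-<prompt>"
    return hashlib.md5(f"{model}-{prompt}".encode()).hexdigest()


def spawn_func(pretrained_model_name_or_path, prompts, cache_dir, device):
    # load tokenizer and text encoder only inside this (sub)process
    tokenizer = AutoTokenizer.from_pretrained(
        pretrained_model_name_or_path, subfolder="tokenizer"
    )
    text_encoder = CLIPTextModel.from_pretrained(
        pretrained_model_name_or_path, subfolder="text_encoder"
    ).to(device)

    with torch.no_grad():
        tokens = tokenizer(
            prompts,
            padding="max_length",
            max_length=tokenizer.model_max_length,
            return_tensors="pt",
        )
        # last hidden states of the CLIP text encoder, one row per prompt
        text_embeddings = text_encoder(tokens.input_ids.to(device))[0]

    # write one .pt file per prompt so prepare_text_embeddings finds it in cache
    for prompt, embedding in zip(prompts, text_embeddings):
        torch.save(
            embedding.cpu(),
            os.path.join(
                cache_dir,
                f"{hash_prompt(pretrained_model_name_or_path, prompt)}.pt",
            ),
        )

    # dropping the encoder lets the spawned process exit and release its VRAM
    del text_encoder

Because the spawned process terminates after `join()`, the text encoder's weights never stay resident in the training process; the subsequent `cleanup()` call then clears any remaining cached GPU memory in the parent.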