in src/models/struxgpt_base.py [0:0]
def close(self):
    if self.model_is_api:
        # API-backed models hold no local GPU resources. The raise below is a
        # deliberate guard; the trailing `pass` keeps this branch valid if the
        # raise is commented out, as the message suggests.
        raise Warning('Closing an API model is not configured. Please comment out this line manually.')
        pass
    else:
        import gc
        import os  # may already be imported at module scope
        import torch
        if self.use_vllm:
            try:
                # Tear down vLLM's tensor-parallel process group before deleting
                # the engine. The import path may differ across vLLM versions.
                from vllm.model_executor.parallel_utils.parallel_state import destroy_model_parallel
                # Silence the HuggingFace tokenizers fork warning during teardown.
                os.environ['TOKENIZERS_PARALLELISM'] = 'false'
                destroy_model_parallel()
            except Exception:
                pass
            # Drop references to the engine so its CUDA memory can be reclaimed.
            del self.model.llm_engine
            del self.model
            # del self
            gc.collect()
            torch.cuda.empty_cache()
            # vLLM runs its workers on Ray; shut it down to release them.
            import ray
            ray.shutdown()
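
# --- Usage sketch (illustrative; not part of the original file) ---
# The class name and methods below are assumed for illustration; only close()
# appears in this excerpt. The pattern it supports: free one local vLLM-backed
# model's GPU memory and Ray workers before loading another in the same process.
#
#     model = StruxGPTBase(model_path, use_vllm=True)    # hypothetical constructor
#     answers = model.generate(prompts)                   # hypothetical inference call
#     model.close()   # destroys the vLLM engine, empties the CUDA cache, stops Ray
#     model = StruxGPTBase(other_model_path, use_vllm=True)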