def get_model()

in container_inference/mm3d/predictor.py [0:0]


    def get_model(cls):
        """Get the model object for this instance, loading it if it's not already loaded."""
        if cls.model is None:
            device = "cuda" if torch.cuda.is_available() else "cpu"

            # model_path is expected to point at a directory containing exactly one
            # mmdetection3d config (*.py) and one checkpoint (*.pth) file.
            config_file = glob(f"{model_path}/*.py")[0]
            checkpoint_file = glob(f"{model_path}/*.pth")[0]
            print(f"Loading config file {config_file} from path {model_path}")

            cls.model = init_model(config_file, checkpoint_file, device=device)

        return cls.model
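
For context, below is a minimal, self-contained sketch of how this lazy-loading classmethod is typically embedded in a predictor class and called from an inference handler. The class name `Predictor`, the `model_path` value, and the `predict` method are assumptions for illustration only; only `get_model` itself comes from the source file.

    from glob import glob

    import torch
    from mmdet3d.apis import inference_detector, init_model

    # Assumed module-level constant; the original file is expected to define
    # where the config (.py) and checkpoint (.pth) files are stored.
    model_path = "/opt/ml/model"


    class Predictor:
        # Cached model shared across requests; populated on first use.
        model = None

        @classmethod
        def get_model(cls):
            """Get the model object for this instance, loading it if it's not already loaded."""
            if cls.model is None:
                device = "cuda" if torch.cuda.is_available() else "cpu"
                config_file = glob(f"{model_path}/*.py")[0]
                checkpoint_file = glob(f"{model_path}/*.pth")[0]
                cls.model = init_model(config_file, checkpoint_file, device=device)
            return cls.model

        @classmethod
        def predict(cls, point_cloud_file):
            """Run inference on a single point cloud file (hypothetical handler)."""
            model = cls.get_model()
            # NOTE: the return shape of inference_detector differs across
            # mmdetection3d versions (result vs. (result, data) tuple).
            return inference_detector(model, point_cloud_file)

Caching the model on the class means the expensive `init_model` call happens once per process, after which every request reuses the already-loaded weights.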