in train/model.py [0:0]
def load_checkpoint(self, epoch, optimizer=None, suffix=''):
    """Restore model (and optionally optimizer) state from a checkpoint file.

    Args:
        epoch: Epoch index used to locate the checkpoint via
            ``self.get_checkpoint_path``.
        optimizer: Optional optimizer whose state should also be restored.
            If ``None``, only the model state is loaded.
        suffix: Filename suffix forwarded to ``get_checkpoint_path``.

    Raises:
        FileNotFoundError: If the checkpoint file does not exist.
    """
    load_path = self.get_checkpoint_path(epoch, suffix)
    # Raise explicitly instead of `assert`: asserts are stripped under -O.
    if not os.path.exists(load_path):
        raise FileNotFoundError("Failed to load: {} (file not exist)".format(load_path))
    checkpoint = torch.load(load_path)
    # strict=False tolerates missing/unexpected keys; the returned flag
    # reports whether *all* parameters were matched.
    all_params_matched = self.load_state(checkpoint['state_dict'], strict=False)
    if optimizer is not None:
        # Only restore optimizer state when it is present AND every model
        # parameter matched; otherwise momentum/statistics buffers could be
        # misaligned with the loaded parameters.
        if 'optimizer' in checkpoint and all_params_matched:
            optimizer.load_state_dict(checkpoint['optimizer'])
            logging.info("Model & Optimizer states are resumed from: `{}'".format(load_path))
        else:
            logging.warning(">> Failed to load optimizer state from: `{}'".format(load_path))
    else:
        logging.info("Only model state resumed from: `{}'".format(load_path))
    # Sanity-check that the checkpoint was actually saved at the requested epoch.
    if 'epoch' in checkpoint and checkpoint['epoch'] != epoch:
        logging.warning(">> Epoch information inconsistent: {} vs {}".format(checkpoint['epoch'], epoch))