in hype_kg/codes/run.py [0:0]
# Module-level imports this function relies on in run.py.
import json
import os

import numpy as np
import torch


def save_model(model, optimizer, save_variable_list, args, before_finetune=False):
    '''
    Save the parameters of the model and the optimizer,
    as well as some other variables such as step and learning_rate.
    '''
    # Dump the training configuration (the argparse namespace) to JSON.
    argparse_dict = vars(args)
    config_name = 'config.json' if not before_finetune else 'config_before.json'
    with open(os.path.join(args.save_path, config_name), 'w') as fjson:
        json.dump(argparse_dict, fjson)

    # Save model and optimizer state together with the extra variables
    # (e.g. step, learning_rate) passed in via save_variable_list.
    torch.save(
        {
            **save_variable_list,
            'model_state_dict': model.state_dict(),
            'optimizer_state_dict': optimizer.state_dict()
        },
        os.path.join(args.save_path, 'checkpoint' if not before_finetune else 'checkpoint_before')
    )

    # Export the entity and relation embeddings as NumPy arrays for later analysis.
    entity_embedding = model.entity_embedding.detach().cpu().numpy()
    np.save(
        os.path.join(args.save_path, 'entity_embedding' if not before_finetune else 'entity_embedding_before'),
        entity_embedding
    )

    relation_embedding = model.relation_embedding.detach().cpu().numpy()
    np.save(
        os.path.join(args.save_path, 'relation_embedding' if not before_finetune else 'relation_embedding_before'),
        relation_embedding
    )
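
For reference, here is a minimal sketch of how artifacts written by save_model could be restored. The load_checkpoint helper and its signature are assumptions for illustration, not part of run.py; it only relies on the key names and file names used above.

import os

import numpy as np
import torch


def load_checkpoint(model, optimizer, save_path):
    # Restore model/optimizer state plus the extra variables (e.g. step, learning_rate).
    checkpoint = torch.load(os.path.join(save_path, 'checkpoint'))
    model.load_state_dict(checkpoint['model_state_dict'])
    optimizer.load_state_dict(checkpoint['optimizer_state_dict'])
    # Whatever was in save_variable_list remains as the other keys of the dict.
    extra = {k: v for k, v in checkpoint.items()
             if k not in ('model_state_dict', 'optimizer_state_dict')}

    # np.save appends the .npy extension, so the embeddings are read back with it.
    entity_embedding = np.load(os.path.join(save_path, 'entity_embedding.npy'))
    relation_embedding = np.load(os.path.join(save_path, 'relation_embedding.npy'))
    return extra, entity_embedding, relation_embedding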