in sing/fondation/trainer.py [0:0]
def __init__(self,
             model,
             train_loss,
             eval_losses,
             train_dataset,
             eval_datasets,
             epochs,
             suffix="",
             batch_size=32,
             cuda=True,
             parallel=False,
             lr=0.0001,
             checkpoint_path=None):
    # Keep a handle on the raw model; wrap it in DataParallel only when
    # multi-GPU training is requested, so the unwrapped module stays
    # available (e.g. for checkpointing).
    self.model = model
    self.parallel = nn.DataParallel(model) if parallel else model
    self.is_parallel = parallel
    # Single loss used for optimization; evaluation losses are stored in a
    # ModuleDict so they follow the trainer across devices.
    self.train_loss = train_loss
    self.eval_losses = nn.ModuleDict(eval_losses)
    self.batch_size = batch_size
    self.cuda = cuda
    self.suffix = suffix
    self.train_dataset = train_dataset
    self.eval_datasets = eval_datasets
    self.epochs = epochs
    self.checkpoint_path = checkpoint_path
    # Only optimize parameters that require gradients (frozen submodules
    # are skipped).
    parameters = [p for p in self.model.parameters() if p.requires_grad]
    self.optimizer = optim.Adam(parameters, lr=lr)
    # Move the model and all losses to the requested device.
    if self.cuda:
        self.model.cuda()
        self.train_loss.cuda()
        self.eval_losses.cuda()
    else:
        self.model.cpu()
        self.train_loss.cpu()
        self.eval_losses.cpu()
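
A minimal usage sketch, not taken from the repository: it assumes the enclosing class is named Trainer (the class header is not shown in this excerpt), that trainer.py imports `torch.nn as nn` and `torch.optim as optim`, and that eval_losses and eval_datasets are plain dicts keyed by name. The toy model, datasets, and loss choices below are illustrative only.

import torch
from torch import nn
from torch.utils.data import TensorDataset

from sing.fondation.trainer import Trainer  # assumed class name

# Toy model and losses; eval_losses must map names to nn.Module losses so
# that nn.ModuleDict(eval_losses) in __init__ accepts it.
model = nn.Linear(16, 1)
train_loss = nn.MSELoss()
eval_losses = {"mse": nn.MSELoss(), "l1": nn.L1Loss()}

# Toy datasets; eval_datasets is assumed to be a name -> dataset mapping.
train_dataset = TensorDataset(torch.randn(128, 16), torch.randn(128, 1))
eval_datasets = {"valid": TensorDataset(torch.randn(32, 16), torch.randn(32, 1))}

trainer = Trainer(
    model=model,
    train_loss=train_loss,
    eval_losses=eval_losses,
    train_dataset=train_dataset,
    eval_datasets=eval_datasets,
    epochs=10,
    batch_size=32,
    cuda=torch.cuda.is_available(),
    parallel=False,
    lr=1e-4,
    checkpoint_path=None)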