in cp_examples/sip_finetune/sip_finetune.py [0:0]
def validation_epoch_end(self, outputs):
    """Aggregate per-batch validation outputs into per-pathology metrics.

    For each pathology in ``self.val_pathology_list``, concatenates the
    logits/targets collected across validation batches, updates the
    corresponding accuracy metric, computes AUROC, and logs everything.
    Also re-checks that fine-tuning has not altered the frozen pretrained
    weights.

    Args:
        outputs: list of per-batch dicts, each with ``"logits"`` and
            ``"targets"`` mapping pathology name -> tensor.
            (Shape assumed flattenable to 1-D per pathology — TODO confirm.)
    """
    # Make sure we didn't change the pretrained weights.
    if self.pretrained_file is not None:
        validate_pretrained_model(self.model.state_dict(), self.pretrained_file)

    auc_vals = []
    for i, path in enumerate(self.val_pathology_list):
        logits = []
        targets = []
        for output in outputs:
            logits.append(output["logits"][path].flatten())
            targets.append(output["targets"][path].flatten())

        logits = torch.cat(logits)
        targets = torch.cat(targets)
        print(f"path: {path}, len: {len(logits)}")

        # Update the stateful accuracy metric for this pathology.
        self.val_acc[i](logits, targets)

        try:
            # sigmoid is monotonic, so AUROC is unaffected by it; kept for
            # parity with the metric's expected probability inputs.
            auc_val = pl.metrics.functional.auroc(torch.sigmoid(logits), targets)
            auc_vals.append(auc_val)
        except ValueError:
            # auroc raises ValueError when AUROC is undefined (e.g. only one
            # class present this epoch). We log 0 for this pathology but
            # intentionally exclude it from the mean so undefined AUCs don't
            # drag the average down.
            auc_val = 0
            print(f"path: {path}, auc_val: {auc_val}")

        self.log(
            f"val_metrics/accuracy_{path}",
            self.val_acc[i],
            on_step=False,
            on_epoch=True,
        )
        self.log(f"val_metrics/auc_{path}", auc_val)

    # Guard against ZeroDivisionError when every pathology's AUROC was
    # undefined this epoch (auc_vals empty).
    if auc_vals:
        self.log("val_metrics/auc_mean", sum(auc_vals) / len(auc_vals))