def configure_optimizers()

in cp_examples/sip_finetune/sip_finetune.py
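
A PyTorch Lightning-style `configure_optimizers` hook. When training from scratch (no `pretrained_file`), the whole model is optimized; when fine-tuning from a pretrained checkpoint, only the classification head (`classifier` or `fc`) is. In both cases Adam is paired with a cosine-annealing learning-rate schedule over `self.epochs`.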


    def configure_optimizers(self):
        # Training from scratch: optimize the full model. Fine-tuning from a
        # pretrained checkpoint: optimize only the classification head.
        if self.pretrained_file is None:
            model = self.model
        else:
            # Torchvision-style backbones expose the head as either
            # `classifier` or `fc`.
            if hasattr(self.model, "classifier"):
                model = self.model.classifier
            elif hasattr(self.model, "fc"):
                model = self.model.fc
            else:
                raise RuntimeError(
                    "Unrecognized classifier: model has neither a "
                    "`classifier` nor an `fc` attribute."
                )

        optimizer = torch.optim.Adam(model.parameters(), lr=self.learning_rate)
        scheduler = torch.optim.lr_scheduler.CosineAnnealingLR(
            optimizer, T_max=self.epochs
        )

        # Lightning accepts a pair of lists: (optimizers, schedulers).
        return [optimizer], [scheduler]
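
For context, below is a minimal, self-contained sketch of how the two branches behave. Only the attribute names (`pretrained_file`, `learning_rate`, `epochs`, `model`) come from the source; the wrapper class, the resnet18 backbone, the default hyperparameters, and the `weights.pt` path are illustrative assumptions.

    import torch
    import torchvision


    class FineTuneSketch:
        """Hypothetical stand-in exposing the attributes the hook reads."""

        def __init__(self, pretrained_file=None, learning_rate=1e-3, epochs=10):
            self.pretrained_file = pretrained_file
            self.learning_rate = learning_rate
            self.epochs = epochs
            # resnet18 names its head `fc`, so the hasattr fallback applies.
            self.model = torchvision.models.resnet18(num_classes=10)

        def configure_optimizers(self):
            # Same selection logic as the method documented above.
            if self.pretrained_file is None:
                model = self.model  # from scratch: train everything
            elif hasattr(self.model, "classifier"):
                model = self.model.classifier
            else:
                model = self.model.fc  # fine-tune: head only
            optimizer = torch.optim.Adam(model.parameters(), lr=self.learning_rate)
            scheduler = torch.optim.lr_scheduler.CosineAnnealingLR(
                optimizer, T_max=self.epochs
            )
            return [optimizer], [scheduler]


    # From scratch: the optimizer covers every model parameter.
    [opt_full], _ = FineTuneSketch(pretrained_file=None).configure_optimizers()
    n_full = sum(p.numel() for g in opt_full.param_groups for p in g["params"])

    # With a checkpoint path set, only the head's weight and bias are optimized.
    [opt_head], _ = FineTuneSketch(pretrained_file="weights.pt").configure_optimizers()
    n_head = sum(p.numel() for g in opt_head.param_groups for p in g["params"])
    assert n_head < n_full

The assertion makes the intent of the pretrained branch concrete: with a checkpoint supplied, the optimizer covers only the head's parameters rather than the whole network.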