def __init__()

in sample_info/methods/classifiers.py


    def __init__(self, model, train_data, val_data=None, l2_reg_coef=0.0, **kwargs):
        super(LinearizedModelV2, self).__init__(**kwargs)
        self.model = model
        self.train_data = train_data
        self.val_data = val_data
        self.l2_reg_coef = l2_reg_coef

        # copy the parameters at initialization
        self.init_params = copy.deepcopy(dict(model.named_parameters()))
        for k, v in self.init_params.items():
            v.detach_()
            v.requires_grad = False  # freeze the snapshot so it is never updated during training

        # compute all gradients
        self.jacobians = dict()
        jacobian_estimator = JacobianEstimator(projection='none')
        self.jacobians['train'] = jacobian_estimator.compute_jacobian(model=model, dataset=train_data, cpu=False)
        if val_data is not None:
            self.jacobians['val'] = jacobian_estimator.compute_jacobian(model=model, dataset=val_data, cpu=False)
        for partition in self.jacobians.keys():
            for k, v in self.jacobians[partition].items():
                v.detach_()  # detach in case a computation graph was built

        # compute predictions at initialization
        self.init_preds = dict()
        self.init_preds['train'] = utils.apply_on_dataset(model=model, dataset=train_data,
                                                          output_keys_regexp='pred', cpu=False)['pred']
        if val_data is not None:
            self.init_preds['val'] = utils.apply_on_dataset(model=model, dataset=val_data,
                                                            output_keys_regexp='pred', cpu=False)['pred']
        for partition in self.init_preds.keys():
            self.init_preds[partition].detach_()  # detach in case a computation graph was built
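
Taken together, the cached quantities are the ingredients of a first-order Taylor expansion of the network around its initial parameters, roughly f(x; θ) ≈ f(x; θ0) + J(x) (θ − θ0), where θ0 are the copied parameters, J(x) the stored Jacobians, and f(x; θ0) the stored initial predictions. A minimal construction sketch follows; it is illustrative only: make_model and load_datasets are hypothetical placeholders, and the model is assumed to follow the repository's interface in which the forward pass returns an output dict containing a 'pred' key (as required by utils.apply_on_dataset above).

    from sample_info.methods.classifiers import LinearizedModelV2

    # make_model and load_datasets are hypothetical placeholders for whatever
    # builds the classifier and the dataset splits in a given experiment; the
    # model's outputs must include a 'pred' entry.
    model = make_model()
    train_data, val_data = load_datasets()

    # The constructor snapshots the parameters, computes per-example Jacobians
    # on both splits, and caches the predictions at initialization.
    linearized = LinearizedModelV2(model=model,
                                   train_data=train_data,
                                   val_data=val_data,
                                   l2_reg_coef=0.01)  # illustrative regularization strength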