def _pre_forward_train()

in diffq/diffq.py [0:0]


    def _pre_forward_train(self):
        if not self._optimizer_setup:
            raise RuntimeError("You must call `setup_optimizer()` on your optimizer "
                               "before starting training.")
        for qparam in self._qparams:
            if qparam.other is not None:
                # Shared parameter: reuse the noisy tensor already installed for the
                # parameter it is tied to.
                noisy = qparam.other.module._parameters[qparam.other.name]
            else:
                # Differentiable number of bits per group, derived from the learned logits.
                bits = self._get_bits(qparam.logit)[:, None]
                if self.group_size == 0:
                    p_flat = qparam.param.view(-1)
                else:
                    # One row per group of `group_size` consecutive weights.
                    p_flat = qparam.param.view(-1, self.group_size)
                # Step size of a uniform quantizer with 2**bits - 1 levels spanning
                # the parameter's full range.
                scale = p_flat.max() - p_flat.min()
                unit = 1 / (2**bits - 1)
                # Additive pseudo-quantization noise: uniform over one quantization
                # step, or Gaussian with comparable spread.
                if self.noise == "uniform":
                    noise_source = (torch.rand_like(p_flat) - 0.5)
                elif self.noise == "gaussian":
                    noise_source = torch.randn_like(p_flat) / 2
                noise = scale * unit * noise_source
                noisy = p_flat + noise
            # We bypass the checks by PyTorch on parameters being leaves by writing
            # directly into the module's `_parameters` dict.
            qparam.module._parameters[qparam.name] = noisy.view_as(qparam.param)
        return True
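
The noise added above spans one quantization step: with `scale` the range of the flattened parameter and `unit = 1 / (2**bits - 1)` the relative step size, the uniform variant perturbs each weight by at most `scale * unit / 2`. A minimal, self-contained sketch of the same computation for a single ungrouped tensor and a scalar bit count (hypothetical helper, not part of diffq):

    import torch

    def pseudo_quant_noise(p: torch.Tensor, bits: float, noise: str = "uniform") -> torch.Tensor:
        """Return `p` plus additive noise emulating uniform quantization at `bits` bits."""
        p_flat = p.view(-1)
        scale = p_flat.max() - p_flat.min()   # range covered by the quantizer
        unit = 1 / (2 ** bits - 1)            # relative step size
        if noise == "uniform":
            noise_source = torch.rand_like(p_flat) - 0.5   # in [-0.5, 0.5)
        elif noise == "gaussian":
            noise_source = torch.randn_like(p_flat) / 2    # comparable spread
        else:
            raise ValueError(f"unknown noise type: {noise}")
        return (p_flat + scale * unit * noise_source).view_as(p)

    w = torch.randn(4, 4)
    w_noisy = pseudo_quant_noise(w, bits=4.0)
    # Uniform noise never exceeds half a quantization step.
    assert (w_noisy - w).abs().max() <= (w.max() - w.min()) / (2 ** 4 - 1) / 2 + 1e-6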
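
The `RuntimeError` at the top reflects the expected call order: the quantizer must be attached to the optimizer before the first training step so that the bit-width logits are trained alongside the weights. A rough usage sketch, assuming the `DiffQuantizer` class, its `setup_optimizer()` method, and the `model_size()` penalty described in the diffq README (treat the exact API as an assumption and check the project documentation):

    import torch
    import torch.nn.functional as F
    from diffq import DiffQuantizer  # assumed import path, as in the diffq README

    model = torch.nn.Linear(16, 16)
    optim = torch.optim.Adam(model.parameters(), lr=1e-3)

    quantizer = DiffQuantizer(model)
    quantizer.setup_optimizer(optim)  # required before training, see the check above

    x, y = torch.randn(8, 16), torch.randn(8, 16)
    model.train()
    for _ in range(10):
        optim.zero_grad()
        # In train mode each forward pass goes through _pre_forward_train(),
        # which temporarily swaps the weights for their noisy versions.
        loss = F.mse_loss(model(x), y) + 1e-3 * quantizer.model_size()
        loss.backward()
        optim.step()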