def gelu_new()

in opacus_lab/models/GPT2/model/feedforward.py


    def gelu_new(self, x):
        """
        Implementation of the GELU activation function currently used in the
        Google BERT repo (identical to OpenAI GPT). See also the Gaussian Error
        Linear Units paper: https://arxiv.org/abs/1606.08415
        Note: this routine is taken from HuggingFace/transformers v4.7.0
        """
        return (
            0.5
            * x
            * (
                1.0
                + torch.tanh(
                    math.sqrt(2.0 / math.pi) * (x + 0.044715 * torch.pow(x, 3.0))
                )
            )
        )
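
The method computes the tanh approximation of GELU: 0.5 * x * (1 + tanh(sqrt(2/pi) * (x + 0.044715 * x^3))). Below is a minimal standalone sketch of that same formula for illustration; it assumes `math` and `torch` are imported at module level in feedforward.py (as the method body requires). The free function `gelu_new_fn` is a hypothetical rewrite for demonstration only, not part of the repo, and the cross-check against `torch.nn.functional.gelu(..., approximate="tanh")` assumes PyTorch >= 1.12.

    import math

    import torch


    def gelu_new_fn(x: torch.Tensor) -> torch.Tensor:
        # Tanh approximation of GELU:
        # 0.5 * x * (1 + tanh(sqrt(2/pi) * (x + 0.044715 * x^3)))
        return 0.5 * x * (
            1.0
            + torch.tanh(math.sqrt(2.0 / math.pi) * (x + 0.044715 * torch.pow(x, 3.0)))
        )


    if __name__ == "__main__":
        x = torch.linspace(-3.0, 3.0, steps=7)
        # On PyTorch >= 1.12 the same approximation is exposed as
        # torch.nn.functional.gelu(x, approximate="tanh"); the two should
        # agree to within floating-point tolerance.
        reference = torch.nn.functional.gelu(x, approximate="tanh")
        assert torch.allclose(gelu_new_fn(x), reference, atol=1e-6)
        print(gelu_new_fn(x))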