in opacus_lab/models/GPT2/model/embedding.py [0:0]
def forward(self, x: torch.Tensor, transposed: bool = False) -> torch.Tensor:
    """Run the embedding either forward (lookup) or backward (projection).

    With ``transposed=False``, ``x`` is passed through ``self.emb`` — an
    ordinary embedding lookup. With ``transposed=True``, ``x`` is instead
    multiplied by the transposed embedding matrix, mapping hidden states
    onto vocabulary-sized scores (the usual weight-tying trick for a
    language model's output head).

    :param x: index tensor for the lookup path, or a float tensor whose
        last dimension matches the embedding dimension for the
        transposed path.
    :param transposed: select the projection (``True``) or the lookup
        (``False``, default) behavior.
    :return: the embedded or projected tensor.
    """
    if not transposed:
        return self.emb(x)
    # Reuse the lookup table's weight as an output projection: x @ W^T.
    return torch.matmul(x, self.emb.weight.transpose(0, 1))