in opacus_lab/models/GPT2/model/attention.py [0:0]
def __init__(self, heads: int, dims: int, dropout: float = 0.1):
    super().__init__()
    # Multi-head attention over `heads` heads; the dropout rate is
    # passed through to the attention module (MultiHeadAttention is
    # defined earlier in this file).
    self.attn = MultiHeadAttention(heads, dropout)
    # Learned query/key/value projections, each mapping dims -> dims.
    self.proj_q = nn.Linear(dims, dims)
    self.proj_k = nn.Linear(dims, dims)
    self.proj_v = nn.Linear(dims, dims)
    # Output projection applied to the attention result.
    self.linear = nn.Linear(dims, dims)
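
# --- Hypothetical usage sketch (not from the source file) ---
# The constructor above only wires up submodules. A forward pass would
# presumably project the input into queries, keys, and values, run
# multi-head attention, then apply the output projection. The call
# signature of self.attn below is an assumption; the real module may
# also take a causal mask and cached key/value state.
def forward(self, x):
    q = self.proj_q(x)      # (batch, seq, dims) -> queries
    k = self.proj_k(x)      # (batch, seq, dims) -> keys
    v = self.proj_v(x)      # (batch, seq, dims) -> values
    x = self.attn(q, k, v)  # assumed: attention over q/k/v, same shape out
    return self.linear(x)   # project back to `dims`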