in opacus_lab/models/GPT2/model/transformer.py [0:0]
def __init__(
    self,
    layers: int,                      # number of transformer blocks
    pad_idx: int,                     # index of the padding token in the vocabulary
    words: int,                       # vocabulary size
    seq_len: int,                     # maximum sequence length
    heads: int,                       # attention heads per block
    dims: int,                        # hidden (embedding) dimension
    rate: int = 4,                    # feed-forward expansion factor (inner size = dims * rate)
    dropout: float = 0.1,             # dropout probability
    finetune: int = -1,
    lm_head_rank: int = 768,          # rank used when the LM head is factorized
    use_low_rank_head: bool = False,  # whether to use a low-rank LM head instead of a full one
    perturb: bool = True,
    bidirectional: bool = True,       # bidirectional attention mask (False for causal LM behavior)
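For illustration, a minimal usage sketch of this constructor with GPT-2-small-like settings. The class name Transformer and all concrete values below are assumptions for the example, not taken from the repository; the defaults shown in the signature above are kept where no value is passed.

    # Hypothetical instantiation (names and values are assumptions)
    model = Transformer(
        layers=12,           # 12 transformer blocks, as in GPT-2 small
        pad_idx=0,           # assumed padding token id
        words=50257,         # GPT-2 BPE vocabulary size
        seq_len=1024,        # maximum context length
        heads=12,            # attention heads per block
        dims=768,            # hidden dimension
        rate=4,              # FFN inner size 768 * 4 = 3072
        dropout=0.1,
        bidirectional=False, # causal mask for autoregressive language modeling
    )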