in models_all.py:
from collections import OrderedDict

import numpy as np
import torch.nn as nn

# Mapping-network __init__; the enclosing class subclasses nn.Sequential,
# as implied by the super().__init__(OrderedDict(layers)) call below.
def __init__(self, nonlinearity="lrelu", use_wscale=True):
    # Use activation *modules*, not bare functions: nn.Sequential registers
    # nn.Module instances, so torch.relu would break here. nn.ReLU() fixes
    # the "relu" branch; the shared instance is safe because it is stateless.
    act, gain = {
        "relu": (nn.ReLU(), np.sqrt(2)),
        "lrelu": (nn.LeakyReLU(negative_slope=0.2), np.sqrt(2)),
    }[nonlinearity]
    # Eight identical 512 -> 512 blocks; build them in a loop instead of
    # repeating the same (dense, activation) pair eight times. The keys
    # dense0..dense7 and dense{i}_act match the original layout exactly.
    layers = [("pixel_norm", PixelNormLayer())]
    for i in range(8):
        layers.append(
            (
                f"dense{i}",
                MyLinear(512, 512, gain=gain, lrmul=0.01, use_wscale=use_wscale),
            )
        )
        layers.append((f"dense{i}_act", act))
    super().__init__(OrderedDict(layers))
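
The listing assumes PixelNormLayer and MyLinear are defined elsewhere in models_all.py. Below is a minimal sketch of what they plausibly look like, following StyleGAN's pixelwise feature normalization and equalized-learning-rate scheme (the use_wscale/lrmul pattern); the repo's actual definitions may differ in detail:

import numpy as np
import torch
import torch.nn as nn

class PixelNormLayer(nn.Module):
    """Normalize each feature vector to unit RMS (StyleGAN pixel norm)."""
    def __init__(self, epsilon=1e-8):
        super().__init__()
        self.epsilon = epsilon

    def forward(self, x):
        return x * torch.rsqrt(torch.mean(x ** 2, dim=1, keepdim=True) + self.epsilon)

class MyLinear(nn.Module):
    """Linear layer with equalized learning rate (wscale) and lrmul (sketch)."""
    def __init__(self, in_features, out_features, gain=np.sqrt(2),
                 use_wscale=True, lrmul=1.0, bias=True):
        super().__init__()
        he_std = gain * in_features ** (-0.5)  # He-init standard deviation
        if use_wscale:
            # Store unit-variance weights and fold the He constant into a
            # runtime multiplier, equalizing the effective learning rate.
            init_std = 1.0 / lrmul
            self.w_mul = he_std * lrmul
        else:
            init_std = he_std / lrmul
            self.w_mul = lrmul
        self.weight = nn.Parameter(torch.randn(out_features, in_features) * init_std)
        if bias:
            self.bias = nn.Parameter(torch.zeros(out_features))
            self.b_mul = lrmul
        else:
            self.bias = None

    def forward(self, x):
        bias = self.bias * self.b_mul if self.bias is not None else None
        return nn.functional.linear(x, self.weight * self.w_mul, bias)

With use_wscale=True and lrmul=0.01, the weights are initialized at std 1/lrmul and scaled down at runtime, which is how StyleGAN runs its mapping network at a reduced effective learning rate.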
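
A hedged usage sketch, assuming the enclosing class is the 8-layer mapping network that turns a 512-dim latent z into a 512-dim intermediate latent w (named G_mapping in common StyleGAN ports; the repo's class name may differ):

import torch

class G_mapping(nn.Sequential):  # assumed class name for illustration
    def __init__(self, nonlinearity="lrelu", use_wscale=True):
        ...  # body as in the listing above

mapping = G_mapping()
z = torch.randn(4, 512)  # batch of 4 latent vectors
w = mapping(z)           # pixel_norm, then 8 x (dense + activation)
print(w.shape)           # torch.Size([4, 512])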