in models/networks/vgg.py [0:0]
def _initialize_weights(self):
    # Modules in this network expose numbered parameter sets
    # (weight1, bias1, weight2, ...) rather than the usual .weight/.bias,
    # so each branch walks those attributes until none are left.
    for m in self.modules():
        if isinstance(m, nn.Conv2d):
            if m.last_layer:
                # A 1x1 convolution flagged as the last layer stands in for
                # a fully connected layer: small Gaussian initialization.
                assert m.kernel_size == (1, 1)
                i = 1
                while hasattr(m, f"weight{i}"):
                    weight = getattr(m, f"weight{i}")
                    weight.data.normal_(0, 0.01)
                    bias = getattr(m, f"bias{i}", None)
                    if bias is not None:
                        bias.data.zero_()
                    i += 1
            else:
                # Ordinary convolutions: He (fan-out) initialization.
                n = m.kernel_size[0] * m.kernel_size[1] * m.out_channels
                i = 1
                while hasattr(m, f"weight{i}"):
                    weight = getattr(m, f"weight{i}")
                    weight.data.normal_(0, math.sqrt(2.0 / n))
                    bias = getattr(m, f"bias{i}", None)
                    if bias is not None:
                        bias.data.zero_()
                    i += 1
        elif isinstance(m, (nn.modules.batchnorm._NormBase, nn.GroupNorm)):
            # Normalization layers: scale 0.5, bias 0.
            i = 1
            while hasattr(m, f"weight{i}"):
                weight = getattr(m, f"weight{i}")
                weight.data.fill_(0.5)
                bias = getattr(m, f"bias{i}", None)
                if bias is not None:
                    bias.data.zero_()
                i += 1
        elif isinstance(m, nn.Linear):
            # Fully connected layers: small Gaussian initialization.
            i = 1
            while hasattr(m, f"weight{i}"):
                weight = getattr(m, f"weight{i}")
                weight.data.normal_(0, 0.01)
                bias = getattr(m, f"bias{i}", None)
                if bias is not None:
                    bias.data.zero_()
                i += 1
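
# --- Illustrative usage sketch (assumption, not part of the original file) ---
# _initialize_weights is written as a method, so it is typically called at the
# end of the model constructor once every submodule exists. Conv modules in
# this codebase are expected to carry a `last_layer` flag and numbered
# parameters (weight1, bias1, ...), which plain nn.Conv2d lacks. The class
# name, constructor arguments, and layer sizes below are hypothetical
# placeholders; `nn` and `math` are presumed imported at the top of the file.

class VGG(nn.Module):
    def __init__(self, features, num_classes=1000):
        super().__init__()
        self.features = features                       # conv / norm stack built elsewhere
        self.classifier = nn.Linear(512, num_classes)
        _initialize_weights(self)                      # in the real class: self._initialize_weights()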