def __init__()

in train_mnist.py


    def __init__(self, ngpu, nc, ndf, norm='spectral', sigmoid=True):
        super(Discriminator, self).__init__()
        self.ngpu = ngpu
        self.norm = norm
        self.sigmoid = sigmoid
        # NOTE: batch norm gets a special case because we don't normalize the first layer.
        # Kept it this way to be able to load pre-trained models.
        if self.norm != 'batch':
            self.main = nn.Sequential(
                *normalize_module2D(nn.Conv2d(nc, ndf, 4, 2, 1, bias=True), norm, ndf),
                nn.LeakyReLU(0.2, inplace=True),

                *normalize_module2D(nn.Conv2d(ndf, ndf * 2, 4, 2, 1, bias=True), norm, ndf * 2),
                nn.LeakyReLU(0.2, inplace=True),

                *normalize_module2D(nn.Conv2d(ndf * 2, ndf * 4, 4, 2, 1, bias=True), norm, ndf * 4),
                nn.LeakyReLU(0.2, inplace=True),

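                # collapse the feature map to a single-channel logit; self.sigmoid is presumably applied in forward()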
                nn.Conv2d(ndf * 4, 1, 4, 1, 0, bias=True),
            )
        else:
            self.main = nn.Sequential(
                nn.Conv2d(nc, ndf, 4, 2, 1, bias=True),
                nn.LeakyReLU(0.2, inplace=True),

                nn.Conv2d(ndf, ndf * 2, 4, 2, 1, bias=True),
                nn.BatchNorm2d(ndf * 2),
                nn.LeakyReLU(0.2, inplace=True),

                nn.Conv2d(ndf * 2, ndf * 4, 4, 2, 1, bias=True),
                nn.BatchNorm2d(ndf * 4),
                nn.LeakyReLU(0.2, inplace=True),

                nn.Conv2d(ndf * 4, 1, 4, 1, 0, bias=True),
            )
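
The normalize_module2D helper referenced above is defined elsewhere in train_mnist.py and is not shown in this snippet. A minimal sketch of what such a helper might look like, assuming it returns the convolution plus the requested normalization as a list of modules (so the caller can unpack it with * into nn.Sequential; parameter names here are illustrative):

    import torch.nn as nn
    from torch.nn.utils import spectral_norm

    def normalize_module2D(conv, norm, num_features):
        # Illustrative sketch only, not the file's actual implementation.
        # Returns a list of modules so the caller can unpack it into nn.Sequential.
        if norm == 'spectral':
            return [spectral_norm(conv)]  # reparameterizes the conv weights, adds no extra layer
        if norm == 'instance':
            return [conv, nn.InstanceNorm2d(num_features)]
        if norm == 'none':
            return [conv]
        raise ValueError('unsupported norm: {}'.format(norm))

Under that reading, batch norm needs the separate branch in __init__ because it is a distinct module placed after each conv and is deliberately skipped on the first layer, whereas spectral normalization only rescales the conv weights and can be applied uniformly to every layer.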