in lib/models/mlp.py
import torch.nn as nn


class MLP_Discriminator(nn.Module):
    def __init__(self, n_in, n_out, layers=[1024, 512, 256], activation='lrelu', batchnorm=False, dropout=0.):
        super(MLP_Discriminator, self).__init__()
        network = []
        n_previous = n_in
        # One Linear -> (BatchNorm) -> activation -> (Dropout) block per hidden width.
        for l in layers:
            network.append(nn.Linear(n_previous, l))
            if batchnorm:
                network.append(nn.BatchNorm1d(l))
            if activation == 'relu':
                network.append(nn.ReLU())
            elif activation == 'lrelu':
                network.append(nn.LeakyReLU(0.2))
            if dropout:
                network.append(nn.Dropout(dropout))
            n_previous = l
        # Final linear projection to n_out; no activation, so the output is raw scores.
        network.append(nn.Linear(n_previous, n_out))
        self.network = nn.Sequential(*network)
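
For context, a minimal usage sketch follows. The excerpt only shows the constructor, so the sketch calls self.network directly rather than assuming a particular forward() implementation; the input size (784), output size (1), and batch size (16) are arbitrary illustration values, not taken from the repository.

import torch

disc = MLP_Discriminator(n_in=784, n_out=1, batchnorm=True, dropout=0.3)
x = torch.randn(16, 784)      # hypothetical batch of 16 flattened inputs
scores = disc.network(x)      # shape (16, 1); raw, un-activated discriminator scores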