lib/models/mlp.py [16:25]:
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
        network = []
        n_previous = n_in
        for l in layers:
            network.append(nn.Linear(n_previous, l))
            if batchnorm:
                network.append(nn.BatchNorm1d(l))
            if activation == 'relu':
                network.append(nn.ReLU())
            elif activation == 'lrelu':
                network.append(nn.LeakyReLU(0.2))
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -



lib/models/mlp.py [42:51]:
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
        network = []
        n_previous = n_in
        for l in layers:
            network.append(nn.Linear(n_previous, l))
            if batchnorm:
                network.append(nn.BatchNorm1d(l))
            if activation == 'relu':
                network.append(nn.ReLU())
            elif activation == 'lrelu':
                network.append(nn.LeakyReLU(0.2))
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
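
Both excerpts build the same Linear -> (BatchNorm1d) -> activation stack, so the duplicated loop is a natural candidate for a shared helper. Below is a minimal sketch of such a factoring, not the repository's actual code: the helper name _hidden_stack is hypothetical, and the n_previous update at the end of the loop is an assumption, since that line falls just outside the quoted ranges.

    import torch.nn as nn

    def _hidden_stack(n_in, layers, activation='relu', batchnorm=False):
        """Hypothetical shared builder for the duplicated hidden-layer loop.

        Stacks Linear -> (BatchNorm1d) -> activation for each width in
        `layers`, starting from `n_in` input features.
        """
        network = []
        n_previous = n_in
        for n_units in layers:
            network.append(nn.Linear(n_previous, n_units))
            if batchnorm:
                network.append(nn.BatchNorm1d(n_units))
            if activation == 'relu':
                network.append(nn.ReLU())
            elif activation == 'lrelu':
                network.append(nn.LeakyReLU(0.2))
            # assumed: the running width is advanced here in the original file
            n_previous = n_units
        return network, n_previous

Both constructors in lib/models/mlp.py (the [16:25] and [42:51] ranges) could then delegate to one helper like this and append their own output heads, which would keep the two MLP variants from drifting apart.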



