lib/models/conv.py [73:92]:
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
        # Select the nonlinearity used throughout the block.
        if activation == 'relu':
            self.activation = F.relu
        elif activation == 'lrelu':
            self.activation = lambda x: F.leaky_relu(x, 0.2)
        else:
            raise ValueError("unknown activation: {}".format(activation))

        self.batchnorm = batchnorm

    def forward(self, x):
        # Each stage is conv -> optional batchnorm -> activation;
        # conv1 is never batch-normalized.
        x = self.activation(self.conv1(x))

        x = self.conv2(x)
        if self.batchnorm:
            x = self.conv2_bn(x)
        x = self.activation(x)

        x = self.conv3(x)
        if self.batchnorm:
            x = self.conv3_bn(x)
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
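
Both excerpts implement the same pattern: pick the activation once in
__init__, then run three convolutions where conv2 and conv3 are optionally
batch-normalized before the activation. A minimal self-contained sketch of
such a block, assuming hypothetical channel widths, 3x3 kernels, and a
trailing activation/return (everything outside the excerpted lines is an
assumption, not the repo's code):

import torch
import torch.nn as nn
import torch.nn.functional as F

class ConvBlock(nn.Module):
    def __init__(self, in_ch=3, ch=64, activation='relu', batchnorm=True):
        super().__init__()
        # Channel widths and 3x3 kernels are assumptions for illustration.
        self.conv1 = nn.Conv2d(in_ch, ch, 3, padding=1)
        self.conv2 = nn.Conv2d(ch, ch, 3, padding=1)
        self.conv3 = nn.Conv2d(ch, ch, 3, padding=1)
        self.conv2_bn = nn.BatchNorm2d(ch)
        self.conv3_bn = nn.BatchNorm2d(ch)

        if activation == 'relu':
            self.activation = F.relu
        elif activation == 'lrelu':
            self.activation = lambda x: F.leaky_relu(x, 0.2)
        else:
            raise ValueError("unknown activation: {}".format(activation))

        self.batchnorm = batchnorm

    def forward(self, x):
        x = self.activation(self.conv1(x))

        x = self.conv2(x)
        if self.batchnorm:
            x = self.conv2_bn(x)
        x = self.activation(x)

        x = self.conv3(x)
        if self.batchnorm:
            x = self.conv3_bn(x)
        return self.activation(x)  # assumed; the excerpts end before the return

# e.g. ConvBlock()(torch.randn(1, 3, 32, 32)) -> shape (1, 64, 32, 32)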



lib/models/conv.py [162:185]:
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
        # Select the nonlinearity used throughout the block.
        if activation == 'relu':
            self.activation = F.relu
        elif activation == 'lrelu':
            self.activation = lambda x: F.leaky_relu(x, 0.2)
        else:
            raise ValueError("unknown activation: {}".format(activation))

        self.batchnorm = batchnorm

    def forward(self, x):
        # Same stage pattern as the block above: conv -> optional
        # batchnorm -> activation, with conv1 never batch-normalized.
        x = self.activation(self.conv1(x))

        x = self.conv2(x)
        if self.batchnorm:
            x = self.conv2_bn(x)
        x = self.activation(x)

        x = self.conv3(x)
        if self.batchnorm:
            x = self.conv3_bn(x)
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
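
Because the two ranges are near-identical, the activation selection is a
natural candidate for a shared helper. A sketch of that refactor, using a
hypothetical get_activation function (the name is an assumption, not
something in the repo):

import torch.nn.functional as F

def get_activation(name):
    """Map an activation name to a callable; mirrors the duplicated branches."""
    if name == 'relu':
        return F.relu
    if name == 'lrelu':
        return lambda x: F.leaky_relu(x, 0.2)
    raise ValueError("unknown activation: {}".format(name))

Each __init__ then reduces to a single line,
self.activation = get_activation(activation), and both forward passes stay
unchanged.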



