# models/res2net101_26w_4s.py
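# NOTE: only the constructor is reproduced here; `import torch` and the enclosing
# torch.nn.Module subclass declaration are required but not shown in this excerpt.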
def __init__(self):
super().__init__()
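# Stem: 7x7/2 conv (3 -> 64), BatchNorm, ReLU, 3x3/2 max-pool.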
self.conv1 = torch.nn.modules.conv.Conv2d(3, 64, (7, 7), stride=(2, 2), padding=(3, 3), dilation=(1, 1), bias=False)
self.bn1 = torch.nn.modules.batchnorm.BatchNorm2d(64)
self.relu = torch.nn.modules.activation.ReLU(inplace=True)
self.maxpool = torch.nn.modules.pooling.MaxPool2d(3, stride=2, padding=1)
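# layer1, block 0 (Res2Net bottleneck, width=26, scale=4): 1x1 reduce 64 -> 104,
# three 3x3 convs over 26-channel splits plus an average-pooled split,
# 1x1 expand to 256, with a 1x1 projection shortcut.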
self.layer1_0_conv1 = torch.nn.modules.conv.Conv2d(64, 104, (1, 1), stride=(1, 1), padding=(0, 0), dilation=(1, 1), bias=False)
self.layer1_0_bn1 = torch.nn.modules.batchnorm.BatchNorm2d(104)
self.layer1_0_relu = torch.nn.modules.activation.ReLU(inplace=True)
self.layer1_0_convs_0 = torch.nn.modules.conv.Conv2d(26, 26, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer1_0_bns_0 = torch.nn.modules.batchnorm.BatchNorm2d(26)
self.layer1_0_relu_1 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer1_0_convs_1 = torch.nn.modules.conv.Conv2d(26, 26, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer1_0_bns_1 = torch.nn.modules.batchnorm.BatchNorm2d(26)
self.layer1_0_relu_2 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer1_0_convs_2 = torch.nn.modules.conv.Conv2d(26, 26, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer1_0_bns_2 = torch.nn.modules.batchnorm.BatchNorm2d(26)
self.layer1_0_relu_3 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer1_0_pool = torch.nn.modules.pooling.AvgPool2d(3, stride=1, padding=1)
self.layer1_0_conv3 = torch.nn.modules.conv.Conv2d(104, 256, (1, 1), stride=(1, 1), padding=(0, 0), dilation=(1, 1), bias=False)
self.layer1_0_bn3 = torch.nn.modules.batchnorm.BatchNorm2d(256)
self.layer1_0_downsample_0 = torch.nn.modules.conv.Conv2d(64, 256, (1, 1), stride=(1, 1), padding=(0, 0), dilation=(1, 1), bias=False)
self.layer1_0_downsample_1 = torch.nn.modules.batchnorm.BatchNorm2d(256)
self.layer1_0_relu_4 = torch.nn.modules.activation.ReLU(inplace=True)
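# layer1, blocks 1-2: same bottleneck at 256 channels with an identity shortcut
# (no pool or downsample; the last 26-channel split is carried through unchanged).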
self.layer1_1_conv1 = torch.nn.modules.conv.Conv2d(256, 104, (1, 1), stride=(1, 1), padding=(0, 0), dilation=(1, 1), bias=False)
self.layer1_1_bn1 = torch.nn.modules.batchnorm.BatchNorm2d(104)
self.layer1_1_relu = torch.nn.modules.activation.ReLU(inplace=True)
self.layer1_1_convs_0 = torch.nn.modules.conv.Conv2d(26, 26, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer1_1_bns_0 = torch.nn.modules.batchnorm.BatchNorm2d(26)
self.layer1_1_relu_1 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer1_1_convs_1 = torch.nn.modules.conv.Conv2d(26, 26, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer1_1_bns_1 = torch.nn.modules.batchnorm.BatchNorm2d(26)
self.layer1_1_relu_2 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer1_1_convs_2 = torch.nn.modules.conv.Conv2d(26, 26, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer1_1_bns_2 = torch.nn.modules.batchnorm.BatchNorm2d(26)
self.layer1_1_relu_3 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer1_1_conv3 = torch.nn.modules.conv.Conv2d(104, 256, (1, 1), stride=(1, 1), padding=(0, 0), dilation=(1, 1), bias=False)
self.layer1_1_bn3 = torch.nn.modules.batchnorm.BatchNorm2d(256)
self.layer1_1_relu_4 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer1_2_conv1 = torch.nn.modules.conv.Conv2d(256, 104, (1, 1), stride=(1, 1), padding=(0, 0), dilation=(1, 1), bias=False)
self.layer1_2_bn1 = torch.nn.modules.batchnorm.BatchNorm2d(104)
self.layer1_2_relu = torch.nn.modules.activation.ReLU(inplace=True)
self.layer1_2_convs_0 = torch.nn.modules.conv.Conv2d(26, 26, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer1_2_bns_0 = torch.nn.modules.batchnorm.BatchNorm2d(26)
self.layer1_2_relu_1 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer1_2_convs_1 = torch.nn.modules.conv.Conv2d(26, 26, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer1_2_bns_1 = torch.nn.modules.batchnorm.BatchNorm2d(26)
self.layer1_2_relu_2 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer1_2_convs_2 = torch.nn.modules.conv.Conv2d(26, 26, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer1_2_bns_2 = torch.nn.modules.batchnorm.BatchNorm2d(26)
self.layer1_2_relu_3 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer1_2_conv3 = torch.nn.modules.conv.Conv2d(104, 256, (1, 1), stride=(1, 1), padding=(0, 0), dilation=(1, 1), bias=False)
self.layer1_2_bn3 = torch.nn.modules.batchnorm.BatchNorm2d(256)
self.layer1_2_relu_4 = torch.nn.modules.activation.ReLU(inplace=True)
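# layer2, block 0 (stride-2 stage block, width=52): 1x1 reduce 256 -> 208,
# stride-2 3x3 convs and a stride-2 average pool on the splits, 1x1 expand to 512,
# with a stride-2 1x1 projection shortcut.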
self.layer2_0_conv1 = torch.nn.modules.conv.Conv2d(256, 208, (1, 1), stride=(1, 1), padding=(0, 0), dilation=(1, 1), bias=False)
self.layer2_0_bn1 = torch.nn.modules.batchnorm.BatchNorm2d(208)
self.layer2_0_relu = torch.nn.modules.activation.ReLU(inplace=True)
self.layer2_0_convs_0 = torch.nn.modules.conv.Conv2d(52, 52, (3, 3), stride=(2, 2), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer2_0_bns_0 = torch.nn.modules.batchnorm.BatchNorm2d(52)
self.layer2_0_relu_1 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer2_0_convs_1 = torch.nn.modules.conv.Conv2d(52, 52, (3, 3), stride=(2, 2), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer2_0_bns_1 = torch.nn.modules.batchnorm.BatchNorm2d(52)
self.layer2_0_relu_2 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer2_0_convs_2 = torch.nn.modules.conv.Conv2d(52, 52, (3, 3), stride=(2, 2), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer2_0_bns_2 = torch.nn.modules.batchnorm.BatchNorm2d(52)
self.layer2_0_relu_3 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer2_0_pool = torch.nn.modules.pooling.AvgPool2d(3, stride=2, padding=1)
self.layer2_0_conv3 = torch.nn.modules.conv.Conv2d(208, 512, (1, 1), stride=(1, 1), padding=(0, 0), dilation=(1, 1), bias=False)
self.layer2_0_bn3 = torch.nn.modules.batchnorm.BatchNorm2d(512)
self.layer2_0_downsample_0 = torch.nn.modules.conv.Conv2d(256, 512, (1, 1), stride=(2, 2), padding=(0, 0), dilation=(1, 1), bias=False)
self.layer2_0_downsample_1 = torch.nn.modules.batchnorm.BatchNorm2d(512)
self.layer2_0_relu_4 = torch.nn.modules.activation.ReLU(inplace=True)
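# layer2, blocks 1-3: identity-shortcut bottlenecks at 512 channels.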
self.layer2_1_conv1 = torch.nn.modules.conv.Conv2d(512, 208, (1, 1), stride=(1, 1), padding=(0, 0), dilation=(1, 1), bias=False)
self.layer2_1_bn1 = torch.nn.modules.batchnorm.BatchNorm2d(208)
self.layer2_1_relu = torch.nn.modules.activation.ReLU(inplace=True)
self.layer2_1_convs_0 = torch.nn.modules.conv.Conv2d(52, 52, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer2_1_bns_0 = torch.nn.modules.batchnorm.BatchNorm2d(52)
self.layer2_1_relu_1 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer2_1_convs_1 = torch.nn.modules.conv.Conv2d(52, 52, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer2_1_bns_1 = torch.nn.modules.batchnorm.BatchNorm2d(52)
self.layer2_1_relu_2 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer2_1_convs_2 = torch.nn.modules.conv.Conv2d(52, 52, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer2_1_bns_2 = torch.nn.modules.batchnorm.BatchNorm2d(52)
self.layer2_1_relu_3 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer2_1_conv3 = torch.nn.modules.conv.Conv2d(208, 512, (1, 1), stride=(1, 1), padding=(0, 0), dilation=(1, 1), bias=False)
self.layer2_1_bn3 = torch.nn.modules.batchnorm.BatchNorm2d(512)
self.layer2_1_relu_4 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer2_2_conv1 = torch.nn.modules.conv.Conv2d(512, 208, (1, 1), stride=(1, 1), padding=(0, 0), dilation=(1, 1), bias=False)
self.layer2_2_bn1 = torch.nn.modules.batchnorm.BatchNorm2d(208)
self.layer2_2_relu = torch.nn.modules.activation.ReLU(inplace=True)
self.layer2_2_convs_0 = torch.nn.modules.conv.Conv2d(52, 52, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer2_2_bns_0 = torch.nn.modules.batchnorm.BatchNorm2d(52)
self.layer2_2_relu_1 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer2_2_convs_1 = torch.nn.modules.conv.Conv2d(52, 52, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer2_2_bns_1 = torch.nn.modules.batchnorm.BatchNorm2d(52)
self.layer2_2_relu_2 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer2_2_convs_2 = torch.nn.modules.conv.Conv2d(52, 52, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer2_2_bns_2 = torch.nn.modules.batchnorm.BatchNorm2d(52)
self.layer2_2_relu_3 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer2_2_conv3 = torch.nn.modules.conv.Conv2d(208, 512, (1, 1), stride=(1, 1), padding=(0, 0), dilation=(1, 1), bias=False)
self.layer2_2_bn3 = torch.nn.modules.batchnorm.BatchNorm2d(512)
self.layer2_2_relu_4 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer2_3_conv1 = torch.nn.modules.conv.Conv2d(512, 208, (1, 1), stride=(1, 1), padding=(0, 0), dilation=(1, 1), bias=False)
self.layer2_3_bn1 = torch.nn.modules.batchnorm.BatchNorm2d(208)
self.layer2_3_relu = torch.nn.modules.activation.ReLU(inplace=True)
self.layer2_3_convs_0 = torch.nn.modules.conv.Conv2d(52, 52, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer2_3_bns_0 = torch.nn.modules.batchnorm.BatchNorm2d(52)
self.layer2_3_relu_1 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer2_3_convs_1 = torch.nn.modules.conv.Conv2d(52, 52, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer2_3_bns_1 = torch.nn.modules.batchnorm.BatchNorm2d(52)
self.layer2_3_relu_2 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer2_3_convs_2 = torch.nn.modules.conv.Conv2d(52, 52, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer2_3_bns_2 = torch.nn.modules.batchnorm.BatchNorm2d(52)
self.layer2_3_relu_3 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer2_3_conv3 = torch.nn.modules.conv.Conv2d(208, 512, (1, 1), stride=(1, 1), padding=(0, 0), dilation=(1, 1), bias=False)
self.layer2_3_bn3 = torch.nn.modules.batchnorm.BatchNorm2d(512)
self.layer2_3_relu_4 = torch.nn.modules.activation.ReLU(inplace=True)
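# layer3, block 0 (stride-2 stage block, width=104): 1x1 reduce 512 -> 416,
# 1x1 expand to 1024, with a stride-2 1x1 projection shortcut.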
self.layer3_0_conv1 = torch.nn.modules.conv.Conv2d(512, 416, (1, 1), stride=(1, 1), padding=(0, 0), dilation=(1, 1), bias=False)
self.layer3_0_bn1 = torch.nn.modules.batchnorm.BatchNorm2d(416)
self.layer3_0_relu = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_0_convs_0 = torch.nn.modules.conv.Conv2d(104, 104, (3, 3), stride=(2, 2), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer3_0_bns_0 = torch.nn.modules.batchnorm.BatchNorm2d(104)
self.layer3_0_relu_1 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_0_convs_1 = torch.nn.modules.conv.Conv2d(104, 104, (3, 3), stride=(2, 2), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer3_0_bns_1 = torch.nn.modules.batchnorm.BatchNorm2d(104)
self.layer3_0_relu_2 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_0_convs_2 = torch.nn.modules.conv.Conv2d(104, 104, (3, 3), stride=(2, 2), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer3_0_bns_2 = torch.nn.modules.batchnorm.BatchNorm2d(104)
self.layer3_0_relu_3 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_0_pool = torch.nn.modules.pooling.AvgPool2d(3, stride=2, padding=1)
self.layer3_0_conv3 = torch.nn.modules.conv.Conv2d(416, 1024, (1, 1), stride=(1, 1), padding=(0, 0), dilation=(1, 1), bias=False)
self.layer3_0_bn3 = torch.nn.modules.batchnorm.BatchNorm2d(1024)
self.layer3_0_downsample_0 = torch.nn.modules.conv.Conv2d(512, 1024, (1, 1), stride=(2, 2), padding=(0, 0), dilation=(1, 1), bias=False)
self.layer3_0_downsample_1 = torch.nn.modules.batchnorm.BatchNorm2d(1024)
self.layer3_0_relu_4 = torch.nn.modules.activation.ReLU(inplace=True)
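# layer3, blocks 1-22: identity-shortcut bottlenecks at 1024 channels
# (23 blocks in total, matching the ResNet-101 layout).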
self.layer3_1_conv1 = torch.nn.modules.conv.Conv2d(1024, 416, (1, 1), stride=(1, 1), padding=(0, 0), dilation=(1, 1), bias=False)
self.layer3_1_bn1 = torch.nn.modules.batchnorm.BatchNorm2d(416)
self.layer3_1_relu = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_1_convs_0 = torch.nn.modules.conv.Conv2d(104, 104, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer3_1_bns_0 = torch.nn.modules.batchnorm.BatchNorm2d(104)
self.layer3_1_relu_1 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_1_convs_1 = torch.nn.modules.conv.Conv2d(104, 104, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer3_1_bns_1 = torch.nn.modules.batchnorm.BatchNorm2d(104)
self.layer3_1_relu_2 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_1_convs_2 = torch.nn.modules.conv.Conv2d(104, 104, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer3_1_bns_2 = torch.nn.modules.batchnorm.BatchNorm2d(104)
self.layer3_1_relu_3 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_1_conv3 = torch.nn.modules.conv.Conv2d(416, 1024, (1, 1), stride=(1, 1), padding=(0, 0), dilation=(1, 1), bias=False)
self.layer3_1_bn3 = torch.nn.modules.batchnorm.BatchNorm2d(1024)
self.layer3_1_relu_4 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_2_conv1 = torch.nn.modules.conv.Conv2d(1024, 416, (1, 1), stride=(1, 1), padding=(0, 0), dilation=(1, 1), bias=False)
self.layer3_2_bn1 = torch.nn.modules.batchnorm.BatchNorm2d(416)
self.layer3_2_relu = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_2_convs_0 = torch.nn.modules.conv.Conv2d(104, 104, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer3_2_bns_0 = torch.nn.modules.batchnorm.BatchNorm2d(104)
self.layer3_2_relu_1 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_2_convs_1 = torch.nn.modules.conv.Conv2d(104, 104, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer3_2_bns_1 = torch.nn.modules.batchnorm.BatchNorm2d(104)
self.layer3_2_relu_2 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_2_convs_2 = torch.nn.modules.conv.Conv2d(104, 104, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer3_2_bns_2 = torch.nn.modules.batchnorm.BatchNorm2d(104)
self.layer3_2_relu_3 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_2_conv3 = torch.nn.modules.conv.Conv2d(416, 1024, (1, 1), stride=(1, 1), padding=(0, 0), dilation=(1, 1), bias=False)
self.layer3_2_bn3 = torch.nn.modules.batchnorm.BatchNorm2d(1024)
self.layer3_2_relu_4 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_3_conv1 = torch.nn.modules.conv.Conv2d(1024, 416, (1, 1), stride=(1, 1), padding=(0, 0), dilation=(1, 1), bias=False)
self.layer3_3_bn1 = torch.nn.modules.batchnorm.BatchNorm2d(416)
self.layer3_3_relu = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_3_convs_0 = torch.nn.modules.conv.Conv2d(104, 104, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer3_3_bns_0 = torch.nn.modules.batchnorm.BatchNorm2d(104)
self.layer3_3_relu_1 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_3_convs_1 = torch.nn.modules.conv.Conv2d(104, 104, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer3_3_bns_1 = torch.nn.modules.batchnorm.BatchNorm2d(104)
self.layer3_3_relu_2 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_3_convs_2 = torch.nn.modules.conv.Conv2d(104, 104, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer3_3_bns_2 = torch.nn.modules.batchnorm.BatchNorm2d(104)
self.layer3_3_relu_3 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_3_conv3 = torch.nn.modules.conv.Conv2d(416, 1024, (1, 1), stride=(1, 1), padding=(0, 0), dilation=(1, 1), bias=False)
self.layer3_3_bn3 = torch.nn.modules.batchnorm.BatchNorm2d(1024)
self.layer3_3_relu_4 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_4_conv1 = torch.nn.modules.conv.Conv2d(1024, 416, (1, 1), stride=(1, 1), padding=(0, 0), dilation=(1, 1), bias=False)
self.layer3_4_bn1 = torch.nn.modules.batchnorm.BatchNorm2d(416)
self.layer3_4_relu = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_4_convs_0 = torch.nn.modules.conv.Conv2d(104, 104, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer3_4_bns_0 = torch.nn.modules.batchnorm.BatchNorm2d(104)
self.layer3_4_relu_1 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_4_convs_1 = torch.nn.modules.conv.Conv2d(104, 104, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer3_4_bns_1 = torch.nn.modules.batchnorm.BatchNorm2d(104)
self.layer3_4_relu_2 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_4_convs_2 = torch.nn.modules.conv.Conv2d(104, 104, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer3_4_bns_2 = torch.nn.modules.batchnorm.BatchNorm2d(104)
self.layer3_4_relu_3 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_4_conv3 = torch.nn.modules.conv.Conv2d(416, 1024, (1, 1), stride=(1, 1), padding=(0, 0), dilation=(1, 1), bias=False)
self.layer3_4_bn3 = torch.nn.modules.batchnorm.BatchNorm2d(1024)
self.layer3_4_relu_4 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_5_conv1 = torch.nn.modules.conv.Conv2d(1024, 416, (1, 1), stride=(1, 1), padding=(0, 0), dilation=(1, 1), bias=False)
self.layer3_5_bn1 = torch.nn.modules.batchnorm.BatchNorm2d(416)
self.layer3_5_relu = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_5_convs_0 = torch.nn.modules.conv.Conv2d(104, 104, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer3_5_bns_0 = torch.nn.modules.batchnorm.BatchNorm2d(104)
self.layer3_5_relu_1 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_5_convs_1 = torch.nn.modules.conv.Conv2d(104, 104, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer3_5_bns_1 = torch.nn.modules.batchnorm.BatchNorm2d(104)
self.layer3_5_relu_2 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_5_convs_2 = torch.nn.modules.conv.Conv2d(104, 104, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer3_5_bns_2 = torch.nn.modules.batchnorm.BatchNorm2d(104)
self.layer3_5_relu_3 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_5_conv3 = torch.nn.modules.conv.Conv2d(416, 1024, (1, 1), stride=(1, 1), padding=(0, 0), dilation=(1, 1), bias=False)
self.layer3_5_bn3 = torch.nn.modules.batchnorm.BatchNorm2d(1024)
self.layer3_5_relu_4 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_6_conv1 = torch.nn.modules.conv.Conv2d(1024, 416, (1, 1), stride=(1, 1), padding=(0, 0), dilation=(1, 1), bias=False)
self.layer3_6_bn1 = torch.nn.modules.batchnorm.BatchNorm2d(416)
self.layer3_6_relu = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_6_convs_0 = torch.nn.modules.conv.Conv2d(104, 104, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer3_6_bns_0 = torch.nn.modules.batchnorm.BatchNorm2d(104)
self.layer3_6_relu_1 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_6_convs_1 = torch.nn.modules.conv.Conv2d(104, 104, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer3_6_bns_1 = torch.nn.modules.batchnorm.BatchNorm2d(104)
self.layer3_6_relu_2 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_6_convs_2 = torch.nn.modules.conv.Conv2d(104, 104, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer3_6_bns_2 = torch.nn.modules.batchnorm.BatchNorm2d(104)
self.layer3_6_relu_3 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_6_conv3 = torch.nn.modules.conv.Conv2d(416, 1024, (1, 1), stride=(1, 1), padding=(0, 0), dilation=(1, 1), bias=False)
self.layer3_6_bn3 = torch.nn.modules.batchnorm.BatchNorm2d(1024)
self.layer3_6_relu_4 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_7_conv1 = torch.nn.modules.conv.Conv2d(1024, 416, (1, 1), stride=(1, 1), padding=(0, 0), dilation=(1, 1), bias=False)
self.layer3_7_bn1 = torch.nn.modules.batchnorm.BatchNorm2d(416)
self.layer3_7_relu = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_7_convs_0 = torch.nn.modules.conv.Conv2d(104, 104, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer3_7_bns_0 = torch.nn.modules.batchnorm.BatchNorm2d(104)
self.layer3_7_relu_1 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_7_convs_1 = torch.nn.modules.conv.Conv2d(104, 104, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer3_7_bns_1 = torch.nn.modules.batchnorm.BatchNorm2d(104)
self.layer3_7_relu_2 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_7_convs_2 = torch.nn.modules.conv.Conv2d(104, 104, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer3_7_bns_2 = torch.nn.modules.batchnorm.BatchNorm2d(104)
self.layer3_7_relu_3 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_7_conv3 = torch.nn.modules.conv.Conv2d(416, 1024, (1, 1), stride=(1, 1), padding=(0, 0), dilation=(1, 1), bias=False)
self.layer3_7_bn3 = torch.nn.modules.batchnorm.BatchNorm2d(1024)
self.layer3_7_relu_4 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_8_conv1 = torch.nn.modules.conv.Conv2d(1024, 416, (1, 1), stride=(1, 1), padding=(0, 0), dilation=(1, 1), bias=False)
self.layer3_8_bn1 = torch.nn.modules.batchnorm.BatchNorm2d(416)
self.layer3_8_relu = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_8_convs_0 = torch.nn.modules.conv.Conv2d(104, 104, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer3_8_bns_0 = torch.nn.modules.batchnorm.BatchNorm2d(104)
self.layer3_8_relu_1 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_8_convs_1 = torch.nn.modules.conv.Conv2d(104, 104, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer3_8_bns_1 = torch.nn.modules.batchnorm.BatchNorm2d(104)
self.layer3_8_relu_2 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_8_convs_2 = torch.nn.modules.conv.Conv2d(104, 104, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer3_8_bns_2 = torch.nn.modules.batchnorm.BatchNorm2d(104)
self.layer3_8_relu_3 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_8_conv3 = torch.nn.modules.conv.Conv2d(416, 1024, (1, 1), stride=(1, 1), padding=(0, 0), dilation=(1, 1), bias=False)
self.layer3_8_bn3 = torch.nn.modules.batchnorm.BatchNorm2d(1024)
self.layer3_8_relu_4 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_9_conv1 = torch.nn.modules.conv.Conv2d(1024, 416, (1, 1), stride=(1, 1), padding=(0, 0), dilation=(1, 1), bias=False)
self.layer3_9_bn1 = torch.nn.modules.batchnorm.BatchNorm2d(416)
self.layer3_9_relu = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_9_convs_0 = torch.nn.modules.conv.Conv2d(104, 104, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer3_9_bns_0 = torch.nn.modules.batchnorm.BatchNorm2d(104)
self.layer3_9_relu_1 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_9_convs_1 = torch.nn.modules.conv.Conv2d(104, 104, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer3_9_bns_1 = torch.nn.modules.batchnorm.BatchNorm2d(104)
self.layer3_9_relu_2 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_9_convs_2 = torch.nn.modules.conv.Conv2d(104, 104, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer3_9_bns_2 = torch.nn.modules.batchnorm.BatchNorm2d(104)
self.layer3_9_relu_3 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_9_conv3 = torch.nn.modules.conv.Conv2d(416, 1024, (1, 1), stride=(1, 1), padding=(0, 0), dilation=(1, 1), bias=False)
self.layer3_9_bn3 = torch.nn.modules.batchnorm.BatchNorm2d(1024)
self.layer3_9_relu_4 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_10_conv1 = torch.nn.modules.conv.Conv2d(1024, 416, (1, 1), stride=(1, 1), padding=(0, 0), dilation=(1, 1), bias=False)
self.layer3_10_bn1 = torch.nn.modules.batchnorm.BatchNorm2d(416)
self.layer3_10_relu = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_10_convs_0 = torch.nn.modules.conv.Conv2d(104, 104, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer3_10_bns_0 = torch.nn.modules.batchnorm.BatchNorm2d(104)
self.layer3_10_relu_1 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_10_convs_1 = torch.nn.modules.conv.Conv2d(104, 104, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer3_10_bns_1 = torch.nn.modules.batchnorm.BatchNorm2d(104)
self.layer3_10_relu_2 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_10_convs_2 = torch.nn.modules.conv.Conv2d(104, 104, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer3_10_bns_2 = torch.nn.modules.batchnorm.BatchNorm2d(104)
self.layer3_10_relu_3 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_10_conv3 = torch.nn.modules.conv.Conv2d(416, 1024, (1, 1), stride=(1, 1), padding=(0, 0), dilation=(1, 1), bias=False)
self.layer3_10_bn3 = torch.nn.modules.batchnorm.BatchNorm2d(1024)
self.layer3_10_relu_4 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_11_conv1 = torch.nn.modules.conv.Conv2d(1024, 416, (1, 1), stride=(1, 1), padding=(0, 0), dilation=(1, 1), bias=False)
self.layer3_11_bn1 = torch.nn.modules.batchnorm.BatchNorm2d(416)
self.layer3_11_relu = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_11_convs_0 = torch.nn.modules.conv.Conv2d(104, 104, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer3_11_bns_0 = torch.nn.modules.batchnorm.BatchNorm2d(104)
self.layer3_11_relu_1 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_11_convs_1 = torch.nn.modules.conv.Conv2d(104, 104, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer3_11_bns_1 = torch.nn.modules.batchnorm.BatchNorm2d(104)
self.layer3_11_relu_2 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_11_convs_2 = torch.nn.modules.conv.Conv2d(104, 104, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer3_11_bns_2 = torch.nn.modules.batchnorm.BatchNorm2d(104)
self.layer3_11_relu_3 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_11_conv3 = torch.nn.modules.conv.Conv2d(416, 1024, (1, 1), stride=(1, 1), padding=(0, 0), dilation=(1, 1), bias=False)
self.layer3_11_bn3 = torch.nn.modules.batchnorm.BatchNorm2d(1024)
self.layer3_11_relu_4 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_12_conv1 = torch.nn.modules.conv.Conv2d(1024, 416, (1, 1), stride=(1, 1), padding=(0, 0), dilation=(1, 1), bias=False)
self.layer3_12_bn1 = torch.nn.modules.batchnorm.BatchNorm2d(416)
self.layer3_12_relu = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_12_convs_0 = torch.nn.modules.conv.Conv2d(104, 104, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer3_12_bns_0 = torch.nn.modules.batchnorm.BatchNorm2d(104)
self.layer3_12_relu_1 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_12_convs_1 = torch.nn.modules.conv.Conv2d(104, 104, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer3_12_bns_1 = torch.nn.modules.batchnorm.BatchNorm2d(104)
self.layer3_12_relu_2 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_12_convs_2 = torch.nn.modules.conv.Conv2d(104, 104, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer3_12_bns_2 = torch.nn.modules.batchnorm.BatchNorm2d(104)
self.layer3_12_relu_3 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_12_conv3 = torch.nn.modules.conv.Conv2d(416, 1024, (1, 1), stride=(1, 1), padding=(0, 0), dilation=(1, 1), bias=False)
self.layer3_12_bn3 = torch.nn.modules.batchnorm.BatchNorm2d(1024)
self.layer3_12_relu_4 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_13_conv1 = torch.nn.modules.conv.Conv2d(1024, 416, (1, 1), stride=(1, 1), padding=(0, 0), dilation=(1, 1), bias=False)
self.layer3_13_bn1 = torch.nn.modules.batchnorm.BatchNorm2d(416)
self.layer3_13_relu = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_13_convs_0 = torch.nn.modules.conv.Conv2d(104, 104, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer3_13_bns_0 = torch.nn.modules.batchnorm.BatchNorm2d(104)
self.layer3_13_relu_1 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_13_convs_1 = torch.nn.modules.conv.Conv2d(104, 104, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer3_13_bns_1 = torch.nn.modules.batchnorm.BatchNorm2d(104)
self.layer3_13_relu_2 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_13_convs_2 = torch.nn.modules.conv.Conv2d(104, 104, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer3_13_bns_2 = torch.nn.modules.batchnorm.BatchNorm2d(104)
self.layer3_13_relu_3 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_13_conv3 = torch.nn.modules.conv.Conv2d(416, 1024, (1, 1), stride=(1, 1), padding=(0, 0), dilation=(1, 1), bias=False)
self.layer3_13_bn3 = torch.nn.modules.batchnorm.BatchNorm2d(1024)
self.layer3_13_relu_4 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_14_conv1 = torch.nn.modules.conv.Conv2d(1024, 416, (1, 1), stride=(1, 1), padding=(0, 0), dilation=(1, 1), bias=False)
self.layer3_14_bn1 = torch.nn.modules.batchnorm.BatchNorm2d(416)
self.layer3_14_relu = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_14_convs_0 = torch.nn.modules.conv.Conv2d(104, 104, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer3_14_bns_0 = torch.nn.modules.batchnorm.BatchNorm2d(104)
self.layer3_14_relu_1 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_14_convs_1 = torch.nn.modules.conv.Conv2d(104, 104, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer3_14_bns_1 = torch.nn.modules.batchnorm.BatchNorm2d(104)
self.layer3_14_relu_2 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_14_convs_2 = torch.nn.modules.conv.Conv2d(104, 104, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer3_14_bns_2 = torch.nn.modules.batchnorm.BatchNorm2d(104)
self.layer3_14_relu_3 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_14_conv3 = torch.nn.modules.conv.Conv2d(416, 1024, (1, 1), stride=(1, 1), padding=(0, 0), dilation=(1, 1), bias=False)
self.layer3_14_bn3 = torch.nn.modules.batchnorm.BatchNorm2d(1024)
self.layer3_14_relu_4 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_15_conv1 = torch.nn.modules.conv.Conv2d(1024, 416, (1, 1), stride=(1, 1), padding=(0, 0), dilation=(1, 1), bias=False)
self.layer3_15_bn1 = torch.nn.modules.batchnorm.BatchNorm2d(416)
self.layer3_15_relu = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_15_convs_0 = torch.nn.modules.conv.Conv2d(104, 104, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer3_15_bns_0 = torch.nn.modules.batchnorm.BatchNorm2d(104)
self.layer3_15_relu_1 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_15_convs_1 = torch.nn.modules.conv.Conv2d(104, 104, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer3_15_bns_1 = torch.nn.modules.batchnorm.BatchNorm2d(104)
self.layer3_15_relu_2 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_15_convs_2 = torch.nn.modules.conv.Conv2d(104, 104, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer3_15_bns_2 = torch.nn.modules.batchnorm.BatchNorm2d(104)
self.layer3_15_relu_3 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_15_conv3 = torch.nn.modules.conv.Conv2d(416, 1024, (1, 1), stride=(1, 1), padding=(0, 0), dilation=(1, 1), bias=False)
self.layer3_15_bn3 = torch.nn.modules.batchnorm.BatchNorm2d(1024)
self.layer3_15_relu_4 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_16_conv1 = torch.nn.modules.conv.Conv2d(1024, 416, (1, 1), stride=(1, 1), padding=(0, 0), dilation=(1, 1), bias=False)
self.layer3_16_bn1 = torch.nn.modules.batchnorm.BatchNorm2d(416)
self.layer3_16_relu = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_16_convs_0 = torch.nn.modules.conv.Conv2d(104, 104, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer3_16_bns_0 = torch.nn.modules.batchnorm.BatchNorm2d(104)
self.layer3_16_relu_1 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_16_convs_1 = torch.nn.modules.conv.Conv2d(104, 104, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer3_16_bns_1 = torch.nn.modules.batchnorm.BatchNorm2d(104)
self.layer3_16_relu_2 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_16_convs_2 = torch.nn.modules.conv.Conv2d(104, 104, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer3_16_bns_2 = torch.nn.modules.batchnorm.BatchNorm2d(104)
self.layer3_16_relu_3 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_16_conv3 = torch.nn.modules.conv.Conv2d(416, 1024, (1, 1), stride=(1, 1), padding=(0, 0), dilation=(1, 1), bias=False)
self.layer3_16_bn3 = torch.nn.modules.batchnorm.BatchNorm2d(1024)
self.layer3_16_relu_4 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_17_conv1 = torch.nn.modules.conv.Conv2d(1024, 416, (1, 1), stride=(1, 1), padding=(0, 0), dilation=(1, 1), bias=False)
self.layer3_17_bn1 = torch.nn.modules.batchnorm.BatchNorm2d(416)
self.layer3_17_relu = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_17_convs_0 = torch.nn.modules.conv.Conv2d(104, 104, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer3_17_bns_0 = torch.nn.modules.batchnorm.BatchNorm2d(104)
self.layer3_17_relu_1 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_17_convs_1 = torch.nn.modules.conv.Conv2d(104, 104, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer3_17_bns_1 = torch.nn.modules.batchnorm.BatchNorm2d(104)
self.layer3_17_relu_2 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_17_convs_2 = torch.nn.modules.conv.Conv2d(104, 104, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer3_17_bns_2 = torch.nn.modules.batchnorm.BatchNorm2d(104)
self.layer3_17_relu_3 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_17_conv3 = torch.nn.modules.conv.Conv2d(416, 1024, (1, 1), stride=(1, 1), padding=(0, 0), dilation=(1, 1), bias=False)
self.layer3_17_bn3 = torch.nn.modules.batchnorm.BatchNorm2d(1024)
self.layer3_17_relu_4 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_18_conv1 = torch.nn.modules.conv.Conv2d(1024, 416, (1, 1), stride=(1, 1), padding=(0, 0), dilation=(1, 1), bias=False)
self.layer3_18_bn1 = torch.nn.modules.batchnorm.BatchNorm2d(416)
self.layer3_18_relu = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_18_convs_0 = torch.nn.modules.conv.Conv2d(104, 104, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer3_18_bns_0 = torch.nn.modules.batchnorm.BatchNorm2d(104)
self.layer3_18_relu_1 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_18_convs_1 = torch.nn.modules.conv.Conv2d(104, 104, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer3_18_bns_1 = torch.nn.modules.batchnorm.BatchNorm2d(104)
self.layer3_18_relu_2 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_18_convs_2 = torch.nn.modules.conv.Conv2d(104, 104, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer3_18_bns_2 = torch.nn.modules.batchnorm.BatchNorm2d(104)
self.layer3_18_relu_3 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_18_conv3 = torch.nn.modules.conv.Conv2d(416, 1024, (1, 1), stride=(1, 1), padding=(0, 0), dilation=(1, 1), bias=False)
self.layer3_18_bn3 = torch.nn.modules.batchnorm.BatchNorm2d(1024)
self.layer3_18_relu_4 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_19_conv1 = torch.nn.modules.conv.Conv2d(1024, 416, (1, 1), stride=(1, 1), padding=(0, 0), dilation=(1, 1), bias=False)
self.layer3_19_bn1 = torch.nn.modules.batchnorm.BatchNorm2d(416)
self.layer3_19_relu = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_19_convs_0 = torch.nn.modules.conv.Conv2d(104, 104, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer3_19_bns_0 = torch.nn.modules.batchnorm.BatchNorm2d(104)
self.layer3_19_relu_1 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_19_convs_1 = torch.nn.modules.conv.Conv2d(104, 104, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer3_19_bns_1 = torch.nn.modules.batchnorm.BatchNorm2d(104)
self.layer3_19_relu_2 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_19_convs_2 = torch.nn.modules.conv.Conv2d(104, 104, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer3_19_bns_2 = torch.nn.modules.batchnorm.BatchNorm2d(104)
self.layer3_19_relu_3 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_19_conv3 = torch.nn.modules.conv.Conv2d(416, 1024, (1, 1), stride=(1, 1), padding=(0, 0), dilation=(1, 1), bias=False)
self.layer3_19_bn3 = torch.nn.modules.batchnorm.BatchNorm2d(1024)
self.layer3_19_relu_4 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_20_conv1 = torch.nn.modules.conv.Conv2d(1024, 416, (1, 1), stride=(1, 1), padding=(0, 0), dilation=(1, 1), bias=False)
self.layer3_20_bn1 = torch.nn.modules.batchnorm.BatchNorm2d(416)
self.layer3_20_relu = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_20_convs_0 = torch.nn.modules.conv.Conv2d(104, 104, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer3_20_bns_0 = torch.nn.modules.batchnorm.BatchNorm2d(104)
self.layer3_20_relu_1 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_20_convs_1 = torch.nn.modules.conv.Conv2d(104, 104, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer3_20_bns_1 = torch.nn.modules.batchnorm.BatchNorm2d(104)
self.layer3_20_relu_2 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_20_convs_2 = torch.nn.modules.conv.Conv2d(104, 104, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer3_20_bns_2 = torch.nn.modules.batchnorm.BatchNorm2d(104)
self.layer3_20_relu_3 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_20_conv3 = torch.nn.modules.conv.Conv2d(416, 1024, (1, 1), stride=(1, 1), padding=(0, 0), dilation=(1, 1), bias=False)
self.layer3_20_bn3 = torch.nn.modules.batchnorm.BatchNorm2d(1024)
self.layer3_20_relu_4 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_21_conv1 = torch.nn.modules.conv.Conv2d(1024, 416, (1, 1), stride=(1, 1), padding=(0, 0), dilation=(1, 1), bias=False)
self.layer3_21_bn1 = torch.nn.modules.batchnorm.BatchNorm2d(416)
self.layer3_21_relu = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_21_convs_0 = torch.nn.modules.conv.Conv2d(104, 104, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer3_21_bns_0 = torch.nn.modules.batchnorm.BatchNorm2d(104)
self.layer3_21_relu_1 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_21_convs_1 = torch.nn.modules.conv.Conv2d(104, 104, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer3_21_bns_1 = torch.nn.modules.batchnorm.BatchNorm2d(104)
self.layer3_21_relu_2 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_21_convs_2 = torch.nn.modules.conv.Conv2d(104, 104, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer3_21_bns_2 = torch.nn.modules.batchnorm.BatchNorm2d(104)
self.layer3_21_relu_3 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_21_conv3 = torch.nn.modules.conv.Conv2d(416, 1024, (1, 1), stride=(1, 1), padding=(0, 0), dilation=(1, 1), bias=False)
self.layer3_21_bn3 = torch.nn.modules.batchnorm.BatchNorm2d(1024)
self.layer3_21_relu_4 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_22_conv1 = torch.nn.modules.conv.Conv2d(1024, 416, (1, 1), stride=(1, 1), padding=(0, 0), dilation=(1, 1), bias=False)
self.layer3_22_bn1 = torch.nn.modules.batchnorm.BatchNorm2d(416)
self.layer3_22_relu = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_22_convs_0 = torch.nn.modules.conv.Conv2d(104, 104, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer3_22_bns_0 = torch.nn.modules.batchnorm.BatchNorm2d(104)
self.layer3_22_relu_1 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_22_convs_1 = torch.nn.modules.conv.Conv2d(104, 104, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer3_22_bns_1 = torch.nn.modules.batchnorm.BatchNorm2d(104)
self.layer3_22_relu_2 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_22_convs_2 = torch.nn.modules.conv.Conv2d(104, 104, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer3_22_bns_2 = torch.nn.modules.batchnorm.BatchNorm2d(104)
self.layer3_22_relu_3 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_22_conv3 = torch.nn.modules.conv.Conv2d(416, 1024, (1, 1), stride=(1, 1), padding=(0, 0), dilation=(1, 1), bias=False)
self.layer3_22_bn3 = torch.nn.modules.batchnorm.BatchNorm2d(1024)
self.layer3_22_relu_4 = torch.nn.modules.activation.ReLU(inplace=True)
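# layer4, block 0 (stride-2 stage block, width=208): 1x1 reduce 1024 -> 832,
# 1x1 expand to 2048, with a stride-2 1x1 projection shortcut.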
self.layer4_0_conv1 = torch.nn.modules.conv.Conv2d(1024, 832, (1, 1), stride=(1, 1), padding=(0, 0), dilation=(1, 1), bias=False)
self.layer4_0_bn1 = torch.nn.modules.batchnorm.BatchNorm2d(832)
self.layer4_0_relu = torch.nn.modules.activation.ReLU(inplace=True)
self.layer4_0_convs_0 = torch.nn.modules.conv.Conv2d(208, 208, (3, 3), stride=(2, 2), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer4_0_bns_0 = torch.nn.modules.batchnorm.BatchNorm2d(208)
self.layer4_0_relu_1 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer4_0_convs_1 = torch.nn.modules.conv.Conv2d(208, 208, (3, 3), stride=(2, 2), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer4_0_bns_1 = torch.nn.modules.batchnorm.BatchNorm2d(208)
self.layer4_0_relu_2 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer4_0_convs_2 = torch.nn.modules.conv.Conv2d(208, 208, (3, 3), stride=(2, 2), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer4_0_bns_2 = torch.nn.modules.batchnorm.BatchNorm2d(208)
self.layer4_0_relu_3 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer4_0_pool = torch.nn.modules.pooling.AvgPool2d(3, stride=2, padding=1)
self.layer4_0_conv3 = torch.nn.modules.conv.Conv2d(832, 2048, (1, 1), stride=(1, 1), padding=(0, 0), dilation=(1, 1), bias=False)
self.layer4_0_bn3 = torch.nn.modules.batchnorm.BatchNorm2d(2048)
self.layer4_0_downsample_0 = torch.nn.modules.conv.Conv2d(1024, 2048, (1, 1), stride=(2, 2), padding=(0, 0), dilation=(1, 1), bias=False)
self.layer4_0_downsample_1 = torch.nn.modules.batchnorm.BatchNorm2d(2048)
self.layer4_0_relu_4 = torch.nn.modules.activation.ReLU(inplace=True)
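# layer4, blocks 1-2: identity-shortcut bottlenecks at 2048 channels.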
self.layer4_1_conv1 = torch.nn.modules.conv.Conv2d(2048, 832, (1, 1), stride=(1, 1), padding=(0, 0), dilation=(1, 1), bias=False)
self.layer4_1_bn1 = torch.nn.modules.batchnorm.BatchNorm2d(832)
self.layer4_1_relu = torch.nn.modules.activation.ReLU(inplace=True)
self.layer4_1_convs_0 = torch.nn.modules.conv.Conv2d(208, 208, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer4_1_bns_0 = torch.nn.modules.batchnorm.BatchNorm2d(208)
self.layer4_1_relu_1 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer4_1_convs_1 = torch.nn.modules.conv.Conv2d(208, 208, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer4_1_bns_1 = torch.nn.modules.batchnorm.BatchNorm2d(208)
self.layer4_1_relu_2 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer4_1_convs_2 = torch.nn.modules.conv.Conv2d(208, 208, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer4_1_bns_2 = torch.nn.modules.batchnorm.BatchNorm2d(208)
self.layer4_1_relu_3 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer4_1_conv3 = torch.nn.modules.conv.Conv2d(832, 2048, (1, 1), stride=(1, 1), padding=(0, 0), dilation=(1, 1), bias=False)
self.layer4_1_bn3 = torch.nn.modules.batchnorm.BatchNorm2d(2048)
self.layer4_1_relu_4 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer4_2_conv1 = torch.nn.modules.conv.Conv2d(2048, 832, (1, 1), stride=(1, 1), padding=(0, 0), dilation=(1, 1), bias=False)
self.layer4_2_bn1 = torch.nn.modules.batchnorm.BatchNorm2d(832)
self.layer4_2_relu = torch.nn.modules.activation.ReLU(inplace=True)
self.layer4_2_convs_0 = torch.nn.modules.conv.Conv2d(208, 208, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer4_2_bns_0 = torch.nn.modules.batchnorm.BatchNorm2d(208)
self.layer4_2_relu_1 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer4_2_convs_1 = torch.nn.modules.conv.Conv2d(208, 208, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer4_2_bns_1 = torch.nn.modules.batchnorm.BatchNorm2d(208)
self.layer4_2_relu_2 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer4_2_convs_2 = torch.nn.modules.conv.Conv2d(208, 208, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer4_2_bns_2 = torch.nn.modules.batchnorm.BatchNorm2d(208)
self.layer4_2_relu_3 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer4_2_conv3 = torch.nn.modules.conv.Conv2d(832, 2048, (1, 1), stride=(1, 1), padding=(0, 0), dilation=(1, 1), bias=False)
self.layer4_2_bn3 = torch.nn.modules.batchnorm.BatchNorm2d(2048)
self.layer4_2_relu_4 = torch.nn.modules.activation.ReLU(inplace=True)
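# Head: global average pool to 1x1, then a 2048 -> 1000 linear classifier.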
self.avgpool = torch.nn.modules.pooling.AdaptiveAvgPool2d(1)
self.fc = torch.nn.modules.linear.Linear(2048, 1000)
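# Minimal usage sketch (the class name `Res2Net101` and the presence of a matching
# forward() are assumptions; neither is shown in this excerpt):
#
#     model = Res2Net101()
#     logits = model(torch.randn(1, 3, 224, 224))  # expected output shape: (1, 1000)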