# models/res2net101_26w_4s.py
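"""Flattened PyTorch definition of Res2Net-101 26w x 4s.

Every layer of the network is declared as its own attribute and the forward
pass is written out operation by operation (no loops or nn.Sequential
containers), so the file reads like a traced graph of the reference model
(width 26, scale 4 per bottleneck).

Minimal usage sketch (illustrative only; parameters are randomly initialised
unless a state dict is loaded separately):

    model = res2net101_26w_4s()
    model.eval()
    logits = model(torch.randn(1, 3, 224, 224))  # -> shape (1, 1000)
"""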
import torch
import torch.nn
import torch.nn.functional
class res2net101_26w_4s(torch.nn.Module):
def __init__(self):
super().__init__()
self.conv1 = torch.nn.modules.conv.Conv2d(3, 64, (7, 7), stride=(2, 2), padding=(3, 3), dilation=(1, 1), bias=False)
self.bn1 = torch.nn.modules.batchnorm.BatchNorm2d(64)
self.relu = torch.nn.modules.activation.ReLU(inplace=True)
self.maxpool = torch.nn.modules.pooling.MaxPool2d(3, stride=2, padding=1)
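
        # Stem above: 7x7/2 conv + BN + ReLU + 3x3/2 max pool; a 224x224 RGB
        # input becomes a 64 x 56 x 56 feature map. The attributes below spell
        # out the four Res2Net stages (3, 4, 23 and 3 bottlenecks) one layer
        # at a time.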
self.layer1_0_conv1 = torch.nn.modules.conv.Conv2d(64, 104, (1, 1), stride=(1, 1), padding=(0, 0), dilation=(1, 1), bias=False)
self.layer1_0_bn1 = torch.nn.modules.batchnorm.BatchNorm2d(104)
self.layer1_0_relu = torch.nn.modules.activation.ReLU(inplace=True)
self.layer1_0_convs_0 = torch.nn.modules.conv.Conv2d(26, 26, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer1_0_bns_0 = torch.nn.modules.batchnorm.BatchNorm2d(26)
self.layer1_0_relu_1 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer1_0_convs_1 = torch.nn.modules.conv.Conv2d(26, 26, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer1_0_bns_1 = torch.nn.modules.batchnorm.BatchNorm2d(26)
self.layer1_0_relu_2 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer1_0_convs_2 = torch.nn.modules.conv.Conv2d(26, 26, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer1_0_bns_2 = torch.nn.modules.batchnorm.BatchNorm2d(26)
self.layer1_0_relu_3 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer1_0_pool = torch.nn.modules.pooling.AvgPool2d(3, stride=1, padding=1)
self.layer1_0_conv3 = torch.nn.modules.conv.Conv2d(104, 256, (1, 1), stride=(1, 1), padding=(0, 0), dilation=(1, 1), bias=False)
self.layer1_0_bn3 = torch.nn.modules.batchnorm.BatchNorm2d(256)
self.layer1_0_downsample_0 = torch.nn.modules.conv.Conv2d(64, 256, (1, 1), stride=(1, 1), padding=(0, 0), dilation=(1, 1), bias=False)
self.layer1_0_downsample_1 = torch.nn.modules.batchnorm.BatchNorm2d(256)
self.layer1_0_relu_4 = torch.nn.modules.activation.ReLU(inplace=True)
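
        # layer1_0 above is the entry bottleneck of stage 1: conv1 expands
        # 64 -> 104 channels (width 26 x scale 4), forward() splits those into
        # four 26-channel groups, three groups get their own 3x3 conv/BN/ReLU,
        # the fourth goes through the AvgPool2d branch, and conv3 projects the
        # re-concatenated 104 channels to 256 while the downsample pair maps
        # the 64-channel identity to 256 for the residual add.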
self.layer1_1_conv1 = torch.nn.modules.conv.Conv2d(256, 104, (1, 1), stride=(1, 1), padding=(0, 0), dilation=(1, 1), bias=False)
self.layer1_1_bn1 = torch.nn.modules.batchnorm.BatchNorm2d(104)
self.layer1_1_relu = torch.nn.modules.activation.ReLU(inplace=True)
self.layer1_1_convs_0 = torch.nn.modules.conv.Conv2d(26, 26, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer1_1_bns_0 = torch.nn.modules.batchnorm.BatchNorm2d(26)
self.layer1_1_relu_1 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer1_1_convs_1 = torch.nn.modules.conv.Conv2d(26, 26, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer1_1_bns_1 = torch.nn.modules.batchnorm.BatchNorm2d(26)
self.layer1_1_relu_2 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer1_1_convs_2 = torch.nn.modules.conv.Conv2d(26, 26, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer1_1_bns_2 = torch.nn.modules.batchnorm.BatchNorm2d(26)
self.layer1_1_relu_3 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer1_1_conv3 = torch.nn.modules.conv.Conv2d(104, 256, (1, 1), stride=(1, 1), padding=(0, 0), dilation=(1, 1), bias=False)
self.layer1_1_bn3 = torch.nn.modules.batchnorm.BatchNorm2d(256)
self.layer1_1_relu_4 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer1_2_conv1 = torch.nn.modules.conv.Conv2d(256, 104, (1, 1), stride=(1, 1), padding=(0, 0), dilation=(1, 1), bias=False)
self.layer1_2_bn1 = torch.nn.modules.batchnorm.BatchNorm2d(104)
self.layer1_2_relu = torch.nn.modules.activation.ReLU(inplace=True)
self.layer1_2_convs_0 = torch.nn.modules.conv.Conv2d(26, 26, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer1_2_bns_0 = torch.nn.modules.batchnorm.BatchNorm2d(26)
self.layer1_2_relu_1 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer1_2_convs_1 = torch.nn.modules.conv.Conv2d(26, 26, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer1_2_bns_1 = torch.nn.modules.batchnorm.BatchNorm2d(26)
self.layer1_2_relu_2 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer1_2_convs_2 = torch.nn.modules.conv.Conv2d(26, 26, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer1_2_bns_2 = torch.nn.modules.batchnorm.BatchNorm2d(26)
self.layer1_2_relu_3 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer1_2_conv3 = torch.nn.modules.conv.Conv2d(104, 256, (1, 1), stride=(1, 1), padding=(0, 0), dilation=(1, 1), bias=False)
self.layer1_2_bn3 = torch.nn.modules.batchnorm.BatchNorm2d(256)
self.layer1_2_relu_4 = torch.nn.modules.activation.ReLU(inplace=True)
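
        # Stage 2: four bottlenecks at width 52 (4 x 52 = 208 internal
        # channels, 512 output channels); layer2_0 halves the spatial size
        # with its stride-2 convs and downsample branch.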
self.layer2_0_conv1 = torch.nn.modules.conv.Conv2d(256, 208, (1, 1), stride=(1, 1), padding=(0, 0), dilation=(1, 1), bias=False)
self.layer2_0_bn1 = torch.nn.modules.batchnorm.BatchNorm2d(208)
self.layer2_0_relu = torch.nn.modules.activation.ReLU(inplace=True)
self.layer2_0_convs_0 = torch.nn.modules.conv.Conv2d(52, 52, (3, 3), stride=(2, 2), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer2_0_bns_0 = torch.nn.modules.batchnorm.BatchNorm2d(52)
self.layer2_0_relu_1 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer2_0_convs_1 = torch.nn.modules.conv.Conv2d(52, 52, (3, 3), stride=(2, 2), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer2_0_bns_1 = torch.nn.modules.batchnorm.BatchNorm2d(52)
self.layer2_0_relu_2 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer2_0_convs_2 = torch.nn.modules.conv.Conv2d(52, 52, (3, 3), stride=(2, 2), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer2_0_bns_2 = torch.nn.modules.batchnorm.BatchNorm2d(52)
self.layer2_0_relu_3 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer2_0_pool = torch.nn.modules.pooling.AvgPool2d(3, stride=2, padding=1)
self.layer2_0_conv3 = torch.nn.modules.conv.Conv2d(208, 512, (1, 1), stride=(1, 1), padding=(0, 0), dilation=(1, 1), bias=False)
self.layer2_0_bn3 = torch.nn.modules.batchnorm.BatchNorm2d(512)
self.layer2_0_downsample_0 = torch.nn.modules.conv.Conv2d(256, 512, (1, 1), stride=(2, 2), padding=(0, 0), dilation=(1, 1), bias=False)
self.layer2_0_downsample_1 = torch.nn.modules.batchnorm.BatchNorm2d(512)
self.layer2_0_relu_4 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer2_1_conv1 = torch.nn.modules.conv.Conv2d(512, 208, (1, 1), stride=(1, 1), padding=(0, 0), dilation=(1, 1), bias=False)
self.layer2_1_bn1 = torch.nn.modules.batchnorm.BatchNorm2d(208)
self.layer2_1_relu = torch.nn.modules.activation.ReLU(inplace=True)
self.layer2_1_convs_0 = torch.nn.modules.conv.Conv2d(52, 52, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer2_1_bns_0 = torch.nn.modules.batchnorm.BatchNorm2d(52)
self.layer2_1_relu_1 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer2_1_convs_1 = torch.nn.modules.conv.Conv2d(52, 52, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer2_1_bns_1 = torch.nn.modules.batchnorm.BatchNorm2d(52)
self.layer2_1_relu_2 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer2_1_convs_2 = torch.nn.modules.conv.Conv2d(52, 52, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer2_1_bns_2 = torch.nn.modules.batchnorm.BatchNorm2d(52)
self.layer2_1_relu_3 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer2_1_conv3 = torch.nn.modules.conv.Conv2d(208, 512, (1, 1), stride=(1, 1), padding=(0, 0), dilation=(1, 1), bias=False)
self.layer2_1_bn3 = torch.nn.modules.batchnorm.BatchNorm2d(512)
self.layer2_1_relu_4 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer2_2_conv1 = torch.nn.modules.conv.Conv2d(512, 208, (1, 1), stride=(1, 1), padding=(0, 0), dilation=(1, 1), bias=False)
self.layer2_2_bn1 = torch.nn.modules.batchnorm.BatchNorm2d(208)
self.layer2_2_relu = torch.nn.modules.activation.ReLU(inplace=True)
self.layer2_2_convs_0 = torch.nn.modules.conv.Conv2d(52, 52, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer2_2_bns_0 = torch.nn.modules.batchnorm.BatchNorm2d(52)
self.layer2_2_relu_1 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer2_2_convs_1 = torch.nn.modules.conv.Conv2d(52, 52, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer2_2_bns_1 = torch.nn.modules.batchnorm.BatchNorm2d(52)
self.layer2_2_relu_2 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer2_2_convs_2 = torch.nn.modules.conv.Conv2d(52, 52, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer2_2_bns_2 = torch.nn.modules.batchnorm.BatchNorm2d(52)
self.layer2_2_relu_3 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer2_2_conv3 = torch.nn.modules.conv.Conv2d(208, 512, (1, 1), stride=(1, 1), padding=(0, 0), dilation=(1, 1), bias=False)
self.layer2_2_bn3 = torch.nn.modules.batchnorm.BatchNorm2d(512)
self.layer2_2_relu_4 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer2_3_conv1 = torch.nn.modules.conv.Conv2d(512, 208, (1, 1), stride=(1, 1), padding=(0, 0), dilation=(1, 1), bias=False)
self.layer2_3_bn1 = torch.nn.modules.batchnorm.BatchNorm2d(208)
self.layer2_3_relu = torch.nn.modules.activation.ReLU(inplace=True)
self.layer2_3_convs_0 = torch.nn.modules.conv.Conv2d(52, 52, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer2_3_bns_0 = torch.nn.modules.batchnorm.BatchNorm2d(52)
self.layer2_3_relu_1 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer2_3_convs_1 = torch.nn.modules.conv.Conv2d(52, 52, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer2_3_bns_1 = torch.nn.modules.batchnorm.BatchNorm2d(52)
self.layer2_3_relu_2 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer2_3_convs_2 = torch.nn.modules.conv.Conv2d(52, 52, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer2_3_bns_2 = torch.nn.modules.batchnorm.BatchNorm2d(52)
self.layer2_3_relu_3 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer2_3_conv3 = torch.nn.modules.conv.Conv2d(208, 512, (1, 1), stride=(1, 1), padding=(0, 0), dilation=(1, 1), bias=False)
self.layer2_3_bn3 = torch.nn.modules.batchnorm.BatchNorm2d(512)
self.layer2_3_relu_4 = torch.nn.modules.activation.ReLU(inplace=True)
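
        # Stage 3: twenty-three bottlenecks at width 104 (416 internal
        # channels, 1024 output channels), again entered through a stride-2
        # block (layer3_0).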
self.layer3_0_conv1 = torch.nn.modules.conv.Conv2d(512, 416, (1, 1), stride=(1, 1), padding=(0, 0), dilation=(1, 1), bias=False)
self.layer3_0_bn1 = torch.nn.modules.batchnorm.BatchNorm2d(416)
self.layer3_0_relu = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_0_convs_0 = torch.nn.modules.conv.Conv2d(104, 104, (3, 3), stride=(2, 2), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer3_0_bns_0 = torch.nn.modules.batchnorm.BatchNorm2d(104)
self.layer3_0_relu_1 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_0_convs_1 = torch.nn.modules.conv.Conv2d(104, 104, (3, 3), stride=(2, 2), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer3_0_bns_1 = torch.nn.modules.batchnorm.BatchNorm2d(104)
self.layer3_0_relu_2 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_0_convs_2 = torch.nn.modules.conv.Conv2d(104, 104, (3, 3), stride=(2, 2), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer3_0_bns_2 = torch.nn.modules.batchnorm.BatchNorm2d(104)
self.layer3_0_relu_3 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_0_pool = torch.nn.modules.pooling.AvgPool2d(3, stride=2, padding=1)
self.layer3_0_conv3 = torch.nn.modules.conv.Conv2d(416, 1024, (1, 1), stride=(1, 1), padding=(0, 0), dilation=(1, 1), bias=False)
self.layer3_0_bn3 = torch.nn.modules.batchnorm.BatchNorm2d(1024)
self.layer3_0_downsample_0 = torch.nn.modules.conv.Conv2d(512, 1024, (1, 1), stride=(2, 2), padding=(0, 0), dilation=(1, 1), bias=False)
self.layer3_0_downsample_1 = torch.nn.modules.batchnorm.BatchNorm2d(1024)
self.layer3_0_relu_4 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_1_conv1 = torch.nn.modules.conv.Conv2d(1024, 416, (1, 1), stride=(1, 1), padding=(0, 0), dilation=(1, 1), bias=False)
self.layer3_1_bn1 = torch.nn.modules.batchnorm.BatchNorm2d(416)
self.layer3_1_relu = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_1_convs_0 = torch.nn.modules.conv.Conv2d(104, 104, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer3_1_bns_0 = torch.nn.modules.batchnorm.BatchNorm2d(104)
self.layer3_1_relu_1 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_1_convs_1 = torch.nn.modules.conv.Conv2d(104, 104, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer3_1_bns_1 = torch.nn.modules.batchnorm.BatchNorm2d(104)
self.layer3_1_relu_2 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_1_convs_2 = torch.nn.modules.conv.Conv2d(104, 104, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer3_1_bns_2 = torch.nn.modules.batchnorm.BatchNorm2d(104)
self.layer3_1_relu_3 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_1_conv3 = torch.nn.modules.conv.Conv2d(416, 1024, (1, 1), stride=(1, 1), padding=(0, 0), dilation=(1, 1), bias=False)
self.layer3_1_bn3 = torch.nn.modules.batchnorm.BatchNorm2d(1024)
self.layer3_1_relu_4 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_2_conv1 = torch.nn.modules.conv.Conv2d(1024, 416, (1, 1), stride=(1, 1), padding=(0, 0), dilation=(1, 1), bias=False)
self.layer3_2_bn1 = torch.nn.modules.batchnorm.BatchNorm2d(416)
self.layer3_2_relu = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_2_convs_0 = torch.nn.modules.conv.Conv2d(104, 104, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer3_2_bns_0 = torch.nn.modules.batchnorm.BatchNorm2d(104)
self.layer3_2_relu_1 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_2_convs_1 = torch.nn.modules.conv.Conv2d(104, 104, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer3_2_bns_1 = torch.nn.modules.batchnorm.BatchNorm2d(104)
self.layer3_2_relu_2 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_2_convs_2 = torch.nn.modules.conv.Conv2d(104, 104, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer3_2_bns_2 = torch.nn.modules.batchnorm.BatchNorm2d(104)
self.layer3_2_relu_3 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_2_conv3 = torch.nn.modules.conv.Conv2d(416, 1024, (1, 1), stride=(1, 1), padding=(0, 0), dilation=(1, 1), bias=False)
self.layer3_2_bn3 = torch.nn.modules.batchnorm.BatchNorm2d(1024)
self.layer3_2_relu_4 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_3_conv1 = torch.nn.modules.conv.Conv2d(1024, 416, (1, 1), stride=(1, 1), padding=(0, 0), dilation=(1, 1), bias=False)
self.layer3_3_bn1 = torch.nn.modules.batchnorm.BatchNorm2d(416)
self.layer3_3_relu = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_3_convs_0 = torch.nn.modules.conv.Conv2d(104, 104, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer3_3_bns_0 = torch.nn.modules.batchnorm.BatchNorm2d(104)
self.layer3_3_relu_1 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_3_convs_1 = torch.nn.modules.conv.Conv2d(104, 104, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer3_3_bns_1 = torch.nn.modules.batchnorm.BatchNorm2d(104)
self.layer3_3_relu_2 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_3_convs_2 = torch.nn.modules.conv.Conv2d(104, 104, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer3_3_bns_2 = torch.nn.modules.batchnorm.BatchNorm2d(104)
self.layer3_3_relu_3 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_3_conv3 = torch.nn.modules.conv.Conv2d(416, 1024, (1, 1), stride=(1, 1), padding=(0, 0), dilation=(1, 1), bias=False)
self.layer3_3_bn3 = torch.nn.modules.batchnorm.BatchNorm2d(1024)
self.layer3_3_relu_4 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_4_conv1 = torch.nn.modules.conv.Conv2d(1024, 416, (1, 1), stride=(1, 1), padding=(0, 0), dilation=(1, 1), bias=False)
self.layer3_4_bn1 = torch.nn.modules.batchnorm.BatchNorm2d(416)
self.layer3_4_relu = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_4_convs_0 = torch.nn.modules.conv.Conv2d(104, 104, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer3_4_bns_0 = torch.nn.modules.batchnorm.BatchNorm2d(104)
self.layer3_4_relu_1 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_4_convs_1 = torch.nn.modules.conv.Conv2d(104, 104, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer3_4_bns_1 = torch.nn.modules.batchnorm.BatchNorm2d(104)
self.layer3_4_relu_2 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_4_convs_2 = torch.nn.modules.conv.Conv2d(104, 104, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer3_4_bns_2 = torch.nn.modules.batchnorm.BatchNorm2d(104)
self.layer3_4_relu_3 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_4_conv3 = torch.nn.modules.conv.Conv2d(416, 1024, (1, 1), stride=(1, 1), padding=(0, 0), dilation=(1, 1), bias=False)
self.layer3_4_bn3 = torch.nn.modules.batchnorm.BatchNorm2d(1024)
self.layer3_4_relu_4 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_5_conv1 = torch.nn.modules.conv.Conv2d(1024, 416, (1, 1), stride=(1, 1), padding=(0, 0), dilation=(1, 1), bias=False)
self.layer3_5_bn1 = torch.nn.modules.batchnorm.BatchNorm2d(416)
self.layer3_5_relu = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_5_convs_0 = torch.nn.modules.conv.Conv2d(104, 104, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer3_5_bns_0 = torch.nn.modules.batchnorm.BatchNorm2d(104)
self.layer3_5_relu_1 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_5_convs_1 = torch.nn.modules.conv.Conv2d(104, 104, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer3_5_bns_1 = torch.nn.modules.batchnorm.BatchNorm2d(104)
self.layer3_5_relu_2 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_5_convs_2 = torch.nn.modules.conv.Conv2d(104, 104, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer3_5_bns_2 = torch.nn.modules.batchnorm.BatchNorm2d(104)
self.layer3_5_relu_3 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_5_conv3 = torch.nn.modules.conv.Conv2d(416, 1024, (1, 1), stride=(1, 1), padding=(0, 0), dilation=(1, 1), bias=False)
self.layer3_5_bn3 = torch.nn.modules.batchnorm.BatchNorm2d(1024)
self.layer3_5_relu_4 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_6_conv1 = torch.nn.modules.conv.Conv2d(1024, 416, (1, 1), stride=(1, 1), padding=(0, 0), dilation=(1, 1), bias=False)
self.layer3_6_bn1 = torch.nn.modules.batchnorm.BatchNorm2d(416)
self.layer3_6_relu = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_6_convs_0 = torch.nn.modules.conv.Conv2d(104, 104, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer3_6_bns_0 = torch.nn.modules.batchnorm.BatchNorm2d(104)
self.layer3_6_relu_1 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_6_convs_1 = torch.nn.modules.conv.Conv2d(104, 104, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer3_6_bns_1 = torch.nn.modules.batchnorm.BatchNorm2d(104)
self.layer3_6_relu_2 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_6_convs_2 = torch.nn.modules.conv.Conv2d(104, 104, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer3_6_bns_2 = torch.nn.modules.batchnorm.BatchNorm2d(104)
self.layer3_6_relu_3 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_6_conv3 = torch.nn.modules.conv.Conv2d(416, 1024, (1, 1), stride=(1, 1), padding=(0, 0), dilation=(1, 1), bias=False)
self.layer3_6_bn3 = torch.nn.modules.batchnorm.BatchNorm2d(1024)
self.layer3_6_relu_4 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_7_conv1 = torch.nn.modules.conv.Conv2d(1024, 416, (1, 1), stride=(1, 1), padding=(0, 0), dilation=(1, 1), bias=False)
self.layer3_7_bn1 = torch.nn.modules.batchnorm.BatchNorm2d(416)
self.layer3_7_relu = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_7_convs_0 = torch.nn.modules.conv.Conv2d(104, 104, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer3_7_bns_0 = torch.nn.modules.batchnorm.BatchNorm2d(104)
self.layer3_7_relu_1 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_7_convs_1 = torch.nn.modules.conv.Conv2d(104, 104, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer3_7_bns_1 = torch.nn.modules.batchnorm.BatchNorm2d(104)
self.layer3_7_relu_2 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_7_convs_2 = torch.nn.modules.conv.Conv2d(104, 104, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer3_7_bns_2 = torch.nn.modules.batchnorm.BatchNorm2d(104)
self.layer3_7_relu_3 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_7_conv3 = torch.nn.modules.conv.Conv2d(416, 1024, (1, 1), stride=(1, 1), padding=(0, 0), dilation=(1, 1), bias=False)
self.layer3_7_bn3 = torch.nn.modules.batchnorm.BatchNorm2d(1024)
self.layer3_7_relu_4 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_8_conv1 = torch.nn.modules.conv.Conv2d(1024, 416, (1, 1), stride=(1, 1), padding=(0, 0), dilation=(1, 1), bias=False)
self.layer3_8_bn1 = torch.nn.modules.batchnorm.BatchNorm2d(416)
self.layer3_8_relu = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_8_convs_0 = torch.nn.modules.conv.Conv2d(104, 104, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer3_8_bns_0 = torch.nn.modules.batchnorm.BatchNorm2d(104)
self.layer3_8_relu_1 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_8_convs_1 = torch.nn.modules.conv.Conv2d(104, 104, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer3_8_bns_1 = torch.nn.modules.batchnorm.BatchNorm2d(104)
self.layer3_8_relu_2 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_8_convs_2 = torch.nn.modules.conv.Conv2d(104, 104, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer3_8_bns_2 = torch.nn.modules.batchnorm.BatchNorm2d(104)
self.layer3_8_relu_3 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_8_conv3 = torch.nn.modules.conv.Conv2d(416, 1024, (1, 1), stride=(1, 1), padding=(0, 0), dilation=(1, 1), bias=False)
self.layer3_8_bn3 = torch.nn.modules.batchnorm.BatchNorm2d(1024)
self.layer3_8_relu_4 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_9_conv1 = torch.nn.modules.conv.Conv2d(1024, 416, (1, 1), stride=(1, 1), padding=(0, 0), dilation=(1, 1), bias=False)
self.layer3_9_bn1 = torch.nn.modules.batchnorm.BatchNorm2d(416)
self.layer3_9_relu = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_9_convs_0 = torch.nn.modules.conv.Conv2d(104, 104, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer3_9_bns_0 = torch.nn.modules.batchnorm.BatchNorm2d(104)
self.layer3_9_relu_1 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_9_convs_1 = torch.nn.modules.conv.Conv2d(104, 104, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer3_9_bns_1 = torch.nn.modules.batchnorm.BatchNorm2d(104)
self.layer3_9_relu_2 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_9_convs_2 = torch.nn.modules.conv.Conv2d(104, 104, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer3_9_bns_2 = torch.nn.modules.batchnorm.BatchNorm2d(104)
self.layer3_9_relu_3 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_9_conv3 = torch.nn.modules.conv.Conv2d(416, 1024, (1, 1), stride=(1, 1), padding=(0, 0), dilation=(1, 1), bias=False)
self.layer3_9_bn3 = torch.nn.modules.batchnorm.BatchNorm2d(1024)
self.layer3_9_relu_4 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_10_conv1 = torch.nn.modules.conv.Conv2d(1024, 416, (1, 1), stride=(1, 1), padding=(0, 0), dilation=(1, 1), bias=False)
self.layer3_10_bn1 = torch.nn.modules.batchnorm.BatchNorm2d(416)
self.layer3_10_relu = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_10_convs_0 = torch.nn.modules.conv.Conv2d(104, 104, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer3_10_bns_0 = torch.nn.modules.batchnorm.BatchNorm2d(104)
self.layer3_10_relu_1 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_10_convs_1 = torch.nn.modules.conv.Conv2d(104, 104, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer3_10_bns_1 = torch.nn.modules.batchnorm.BatchNorm2d(104)
self.layer3_10_relu_2 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_10_convs_2 = torch.nn.modules.conv.Conv2d(104, 104, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer3_10_bns_2 = torch.nn.modules.batchnorm.BatchNorm2d(104)
self.layer3_10_relu_3 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_10_conv3 = torch.nn.modules.conv.Conv2d(416, 1024, (1, 1), stride=(1, 1), padding=(0, 0), dilation=(1, 1), bias=False)
self.layer3_10_bn3 = torch.nn.modules.batchnorm.BatchNorm2d(1024)
self.layer3_10_relu_4 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_11_conv1 = torch.nn.modules.conv.Conv2d(1024, 416, (1, 1), stride=(1, 1), padding=(0, 0), dilation=(1, 1), bias=False)
self.layer3_11_bn1 = torch.nn.modules.batchnorm.BatchNorm2d(416)
self.layer3_11_relu = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_11_convs_0 = torch.nn.modules.conv.Conv2d(104, 104, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer3_11_bns_0 = torch.nn.modules.batchnorm.BatchNorm2d(104)
self.layer3_11_relu_1 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_11_convs_1 = torch.nn.modules.conv.Conv2d(104, 104, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer3_11_bns_1 = torch.nn.modules.batchnorm.BatchNorm2d(104)
self.layer3_11_relu_2 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_11_convs_2 = torch.nn.modules.conv.Conv2d(104, 104, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer3_11_bns_2 = torch.nn.modules.batchnorm.BatchNorm2d(104)
self.layer3_11_relu_3 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_11_conv3 = torch.nn.modules.conv.Conv2d(416, 1024, (1, 1), stride=(1, 1), padding=(0, 0), dilation=(1, 1), bias=False)
self.layer3_11_bn3 = torch.nn.modules.batchnorm.BatchNorm2d(1024)
self.layer3_11_relu_4 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_12_conv1 = torch.nn.modules.conv.Conv2d(1024, 416, (1, 1), stride=(1, 1), padding=(0, 0), dilation=(1, 1), bias=False)
self.layer3_12_bn1 = torch.nn.modules.batchnorm.BatchNorm2d(416)
self.layer3_12_relu = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_12_convs_0 = torch.nn.modules.conv.Conv2d(104, 104, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer3_12_bns_0 = torch.nn.modules.batchnorm.BatchNorm2d(104)
self.layer3_12_relu_1 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_12_convs_1 = torch.nn.modules.conv.Conv2d(104, 104, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer3_12_bns_1 = torch.nn.modules.batchnorm.BatchNorm2d(104)
self.layer3_12_relu_2 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_12_convs_2 = torch.nn.modules.conv.Conv2d(104, 104, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer3_12_bns_2 = torch.nn.modules.batchnorm.BatchNorm2d(104)
self.layer3_12_relu_3 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_12_conv3 = torch.nn.modules.conv.Conv2d(416, 1024, (1, 1), stride=(1, 1), padding=(0, 0), dilation=(1, 1), bias=False)
self.layer3_12_bn3 = torch.nn.modules.batchnorm.BatchNorm2d(1024)
self.layer3_12_relu_4 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_13_conv1 = torch.nn.modules.conv.Conv2d(1024, 416, (1, 1), stride=(1, 1), padding=(0, 0), dilation=(1, 1), bias=False)
self.layer3_13_bn1 = torch.nn.modules.batchnorm.BatchNorm2d(416)
self.layer3_13_relu = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_13_convs_0 = torch.nn.modules.conv.Conv2d(104, 104, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer3_13_bns_0 = torch.nn.modules.batchnorm.BatchNorm2d(104)
self.layer3_13_relu_1 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_13_convs_1 = torch.nn.modules.conv.Conv2d(104, 104, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer3_13_bns_1 = torch.nn.modules.batchnorm.BatchNorm2d(104)
self.layer3_13_relu_2 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_13_convs_2 = torch.nn.modules.conv.Conv2d(104, 104, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer3_13_bns_2 = torch.nn.modules.batchnorm.BatchNorm2d(104)
self.layer3_13_relu_3 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_13_conv3 = torch.nn.modules.conv.Conv2d(416, 1024, (1, 1), stride=(1, 1), padding=(0, 0), dilation=(1, 1), bias=False)
self.layer3_13_bn3 = torch.nn.modules.batchnorm.BatchNorm2d(1024)
self.layer3_13_relu_4 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_14_conv1 = torch.nn.modules.conv.Conv2d(1024, 416, (1, 1), stride=(1, 1), padding=(0, 0), dilation=(1, 1), bias=False)
self.layer3_14_bn1 = torch.nn.modules.batchnorm.BatchNorm2d(416)
self.layer3_14_relu = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_14_convs_0 = torch.nn.modules.conv.Conv2d(104, 104, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer3_14_bns_0 = torch.nn.modules.batchnorm.BatchNorm2d(104)
self.layer3_14_relu_1 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_14_convs_1 = torch.nn.modules.conv.Conv2d(104, 104, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer3_14_bns_1 = torch.nn.modules.batchnorm.BatchNorm2d(104)
self.layer3_14_relu_2 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_14_convs_2 = torch.nn.modules.conv.Conv2d(104, 104, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer3_14_bns_2 = torch.nn.modules.batchnorm.BatchNorm2d(104)
self.layer3_14_relu_3 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_14_conv3 = torch.nn.modules.conv.Conv2d(416, 1024, (1, 1), stride=(1, 1), padding=(0, 0), dilation=(1, 1), bias=False)
self.layer3_14_bn3 = torch.nn.modules.batchnorm.BatchNorm2d(1024)
self.layer3_14_relu_4 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_15_conv1 = torch.nn.modules.conv.Conv2d(1024, 416, (1, 1), stride=(1, 1), padding=(0, 0), dilation=(1, 1), bias=False)
self.layer3_15_bn1 = torch.nn.modules.batchnorm.BatchNorm2d(416)
self.layer3_15_relu = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_15_convs_0 = torch.nn.modules.conv.Conv2d(104, 104, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer3_15_bns_0 = torch.nn.modules.batchnorm.BatchNorm2d(104)
self.layer3_15_relu_1 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_15_convs_1 = torch.nn.modules.conv.Conv2d(104, 104, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer3_15_bns_1 = torch.nn.modules.batchnorm.BatchNorm2d(104)
self.layer3_15_relu_2 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_15_convs_2 = torch.nn.modules.conv.Conv2d(104, 104, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer3_15_bns_2 = torch.nn.modules.batchnorm.BatchNorm2d(104)
self.layer3_15_relu_3 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_15_conv3 = torch.nn.modules.conv.Conv2d(416, 1024, (1, 1), stride=(1, 1), padding=(0, 0), dilation=(1, 1), bias=False)
self.layer3_15_bn3 = torch.nn.modules.batchnorm.BatchNorm2d(1024)
self.layer3_15_relu_4 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_16_conv1 = torch.nn.modules.conv.Conv2d(1024, 416, (1, 1), stride=(1, 1), padding=(0, 0), dilation=(1, 1), bias=False)
self.layer3_16_bn1 = torch.nn.modules.batchnorm.BatchNorm2d(416)
self.layer3_16_relu = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_16_convs_0 = torch.nn.modules.conv.Conv2d(104, 104, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer3_16_bns_0 = torch.nn.modules.batchnorm.BatchNorm2d(104)
self.layer3_16_relu_1 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_16_convs_1 = torch.nn.modules.conv.Conv2d(104, 104, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer3_16_bns_1 = torch.nn.modules.batchnorm.BatchNorm2d(104)
self.layer3_16_relu_2 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_16_convs_2 = torch.nn.modules.conv.Conv2d(104, 104, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer3_16_bns_2 = torch.nn.modules.batchnorm.BatchNorm2d(104)
self.layer3_16_relu_3 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_16_conv3 = torch.nn.modules.conv.Conv2d(416, 1024, (1, 1), stride=(1, 1), padding=(0, 0), dilation=(1, 1), bias=False)
self.layer3_16_bn3 = torch.nn.modules.batchnorm.BatchNorm2d(1024)
self.layer3_16_relu_4 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_17_conv1 = torch.nn.modules.conv.Conv2d(1024, 416, (1, 1), stride=(1, 1), padding=(0, 0), dilation=(1, 1), bias=False)
self.layer3_17_bn1 = torch.nn.modules.batchnorm.BatchNorm2d(416)
self.layer3_17_relu = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_17_convs_0 = torch.nn.modules.conv.Conv2d(104, 104, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer3_17_bns_0 = torch.nn.modules.batchnorm.BatchNorm2d(104)
self.layer3_17_relu_1 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_17_convs_1 = torch.nn.modules.conv.Conv2d(104, 104, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer3_17_bns_1 = torch.nn.modules.batchnorm.BatchNorm2d(104)
self.layer3_17_relu_2 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_17_convs_2 = torch.nn.modules.conv.Conv2d(104, 104, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer3_17_bns_2 = torch.nn.modules.batchnorm.BatchNorm2d(104)
self.layer3_17_relu_3 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_17_conv3 = torch.nn.modules.conv.Conv2d(416, 1024, (1, 1), stride=(1, 1), padding=(0, 0), dilation=(1, 1), bias=False)
self.layer3_17_bn3 = torch.nn.modules.batchnorm.BatchNorm2d(1024)
self.layer3_17_relu_4 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_18_conv1 = torch.nn.modules.conv.Conv2d(1024, 416, (1, 1), stride=(1, 1), padding=(0, 0), dilation=(1, 1), bias=False)
self.layer3_18_bn1 = torch.nn.modules.batchnorm.BatchNorm2d(416)
self.layer3_18_relu = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_18_convs_0 = torch.nn.modules.conv.Conv2d(104, 104, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer3_18_bns_0 = torch.nn.modules.batchnorm.BatchNorm2d(104)
self.layer3_18_relu_1 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_18_convs_1 = torch.nn.modules.conv.Conv2d(104, 104, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer3_18_bns_1 = torch.nn.modules.batchnorm.BatchNorm2d(104)
self.layer3_18_relu_2 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_18_convs_2 = torch.nn.modules.conv.Conv2d(104, 104, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer3_18_bns_2 = torch.nn.modules.batchnorm.BatchNorm2d(104)
self.layer3_18_relu_3 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_18_conv3 = torch.nn.modules.conv.Conv2d(416, 1024, (1, 1), stride=(1, 1), padding=(0, 0), dilation=(1, 1), bias=False)
self.layer3_18_bn3 = torch.nn.modules.batchnorm.BatchNorm2d(1024)
self.layer3_18_relu_4 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_19_conv1 = torch.nn.modules.conv.Conv2d(1024, 416, (1, 1), stride=(1, 1), padding=(0, 0), dilation=(1, 1), bias=False)
self.layer3_19_bn1 = torch.nn.modules.batchnorm.BatchNorm2d(416)
self.layer3_19_relu = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_19_convs_0 = torch.nn.modules.conv.Conv2d(104, 104, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer3_19_bns_0 = torch.nn.modules.batchnorm.BatchNorm2d(104)
self.layer3_19_relu_1 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_19_convs_1 = torch.nn.modules.conv.Conv2d(104, 104, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer3_19_bns_1 = torch.nn.modules.batchnorm.BatchNorm2d(104)
self.layer3_19_relu_2 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_19_convs_2 = torch.nn.modules.conv.Conv2d(104, 104, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer3_19_bns_2 = torch.nn.modules.batchnorm.BatchNorm2d(104)
self.layer3_19_relu_3 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_19_conv3 = torch.nn.modules.conv.Conv2d(416, 1024, (1, 1), stride=(1, 1), padding=(0, 0), dilation=(1, 1), bias=False)
self.layer3_19_bn3 = torch.nn.modules.batchnorm.BatchNorm2d(1024)
self.layer3_19_relu_4 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_20_conv1 = torch.nn.modules.conv.Conv2d(1024, 416, (1, 1), stride=(1, 1), padding=(0, 0), dilation=(1, 1), bias=False)
self.layer3_20_bn1 = torch.nn.modules.batchnorm.BatchNorm2d(416)
self.layer3_20_relu = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_20_convs_0 = torch.nn.modules.conv.Conv2d(104, 104, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer3_20_bns_0 = torch.nn.modules.batchnorm.BatchNorm2d(104)
self.layer3_20_relu_1 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_20_convs_1 = torch.nn.modules.conv.Conv2d(104, 104, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer3_20_bns_1 = torch.nn.modules.batchnorm.BatchNorm2d(104)
self.layer3_20_relu_2 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_20_convs_2 = torch.nn.modules.conv.Conv2d(104, 104, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer3_20_bns_2 = torch.nn.modules.batchnorm.BatchNorm2d(104)
self.layer3_20_relu_3 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_20_conv3 = torch.nn.modules.conv.Conv2d(416, 1024, (1, 1), stride=(1, 1), padding=(0, 0), dilation=(1, 1), bias=False)
self.layer3_20_bn3 = torch.nn.modules.batchnorm.BatchNorm2d(1024)
self.layer3_20_relu_4 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_21_conv1 = torch.nn.modules.conv.Conv2d(1024, 416, (1, 1), stride=(1, 1), padding=(0, 0), dilation=(1, 1), bias=False)
self.layer3_21_bn1 = torch.nn.modules.batchnorm.BatchNorm2d(416)
self.layer3_21_relu = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_21_convs_0 = torch.nn.modules.conv.Conv2d(104, 104, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer3_21_bns_0 = torch.nn.modules.batchnorm.BatchNorm2d(104)
self.layer3_21_relu_1 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_21_convs_1 = torch.nn.modules.conv.Conv2d(104, 104, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer3_21_bns_1 = torch.nn.modules.batchnorm.BatchNorm2d(104)
self.layer3_21_relu_2 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_21_convs_2 = torch.nn.modules.conv.Conv2d(104, 104, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer3_21_bns_2 = torch.nn.modules.batchnorm.BatchNorm2d(104)
self.layer3_21_relu_3 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_21_conv3 = torch.nn.modules.conv.Conv2d(416, 1024, (1, 1), stride=(1, 1), padding=(0, 0), dilation=(1, 1), bias=False)
self.layer3_21_bn3 = torch.nn.modules.batchnorm.BatchNorm2d(1024)
self.layer3_21_relu_4 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_22_conv1 = torch.nn.modules.conv.Conv2d(1024, 416, (1, 1), stride=(1, 1), padding=(0, 0), dilation=(1, 1), bias=False)
self.layer3_22_bn1 = torch.nn.modules.batchnorm.BatchNorm2d(416)
self.layer3_22_relu = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_22_convs_0 = torch.nn.modules.conv.Conv2d(104, 104, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer3_22_bns_0 = torch.nn.modules.batchnorm.BatchNorm2d(104)
self.layer3_22_relu_1 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_22_convs_1 = torch.nn.modules.conv.Conv2d(104, 104, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer3_22_bns_1 = torch.nn.modules.batchnorm.BatchNorm2d(104)
self.layer3_22_relu_2 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_22_convs_2 = torch.nn.modules.conv.Conv2d(104, 104, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer3_22_bns_2 = torch.nn.modules.batchnorm.BatchNorm2d(104)
self.layer3_22_relu_3 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_22_conv3 = torch.nn.modules.conv.Conv2d(416, 1024, (1, 1), stride=(1, 1), padding=(0, 0), dilation=(1, 1), bias=False)
self.layer3_22_bn3 = torch.nn.modules.batchnorm.BatchNorm2d(1024)
self.layer3_22_relu_4 = torch.nn.modules.activation.ReLU(inplace=True)
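
        # Stage 4: three bottlenecks at width 208 (832 internal channels,
        # 2048 output channels), entered through the stride-2 layer4_0.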
self.layer4_0_conv1 = torch.nn.modules.conv.Conv2d(1024, 832, (1, 1), stride=(1, 1), padding=(0, 0), dilation=(1, 1), bias=False)
self.layer4_0_bn1 = torch.nn.modules.batchnorm.BatchNorm2d(832)
self.layer4_0_relu = torch.nn.modules.activation.ReLU(inplace=True)
self.layer4_0_convs_0 = torch.nn.modules.conv.Conv2d(208, 208, (3, 3), stride=(2, 2), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer4_0_bns_0 = torch.nn.modules.batchnorm.BatchNorm2d(208)
self.layer4_0_relu_1 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer4_0_convs_1 = torch.nn.modules.conv.Conv2d(208, 208, (3, 3), stride=(2, 2), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer4_0_bns_1 = torch.nn.modules.batchnorm.BatchNorm2d(208)
self.layer4_0_relu_2 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer4_0_convs_2 = torch.nn.modules.conv.Conv2d(208, 208, (3, 3), stride=(2, 2), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer4_0_bns_2 = torch.nn.modules.batchnorm.BatchNorm2d(208)
self.layer4_0_relu_3 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer4_0_pool = torch.nn.modules.pooling.AvgPool2d(3, stride=2, padding=1)
self.layer4_0_conv3 = torch.nn.modules.conv.Conv2d(832, 2048, (1, 1), stride=(1, 1), padding=(0, 0), dilation=(1, 1), bias=False)
self.layer4_0_bn3 = torch.nn.modules.batchnorm.BatchNorm2d(2048)
self.layer4_0_downsample_0 = torch.nn.modules.conv.Conv2d(1024, 2048, (1, 1), stride=(2, 2), padding=(0, 0), dilation=(1, 1), bias=False)
self.layer4_0_downsample_1 = torch.nn.modules.batchnorm.BatchNorm2d(2048)
self.layer4_0_relu_4 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer4_1_conv1 = torch.nn.modules.conv.Conv2d(2048, 832, (1, 1), stride=(1, 1), padding=(0, 0), dilation=(1, 1), bias=False)
self.layer4_1_bn1 = torch.nn.modules.batchnorm.BatchNorm2d(832)
self.layer4_1_relu = torch.nn.modules.activation.ReLU(inplace=True)
self.layer4_1_convs_0 = torch.nn.modules.conv.Conv2d(208, 208, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer4_1_bns_0 = torch.nn.modules.batchnorm.BatchNorm2d(208)
self.layer4_1_relu_1 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer4_1_convs_1 = torch.nn.modules.conv.Conv2d(208, 208, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer4_1_bns_1 = torch.nn.modules.batchnorm.BatchNorm2d(208)
self.layer4_1_relu_2 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer4_1_convs_2 = torch.nn.modules.conv.Conv2d(208, 208, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer4_1_bns_2 = torch.nn.modules.batchnorm.BatchNorm2d(208)
self.layer4_1_relu_3 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer4_1_conv3 = torch.nn.modules.conv.Conv2d(832, 2048, (1, 1), stride=(1, 1), padding=(0, 0), dilation=(1, 1), bias=False)
self.layer4_1_bn3 = torch.nn.modules.batchnorm.BatchNorm2d(2048)
self.layer4_1_relu_4 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer4_2_conv1 = torch.nn.modules.conv.Conv2d(2048, 832, (1, 1), stride=(1, 1), padding=(0, 0), dilation=(1, 1), bias=False)
self.layer4_2_bn1 = torch.nn.modules.batchnorm.BatchNorm2d(832)
self.layer4_2_relu = torch.nn.modules.activation.ReLU(inplace=True)
self.layer4_2_convs_0 = torch.nn.modules.conv.Conv2d(208, 208, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer4_2_bns_0 = torch.nn.modules.batchnorm.BatchNorm2d(208)
self.layer4_2_relu_1 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer4_2_convs_1 = torch.nn.modules.conv.Conv2d(208, 208, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer4_2_bns_1 = torch.nn.modules.batchnorm.BatchNorm2d(208)
self.layer4_2_relu_2 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer4_2_convs_2 = torch.nn.modules.conv.Conv2d(208, 208, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer4_2_bns_2 = torch.nn.modules.batchnorm.BatchNorm2d(208)
self.layer4_2_relu_3 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer4_2_conv3 = torch.nn.modules.conv.Conv2d(832, 2048, (1, 1), stride=(1, 1), padding=(0, 0), dilation=(1, 1), bias=False)
self.layer4_2_bn3 = torch.nn.modules.batchnorm.BatchNorm2d(2048)
self.layer4_2_relu_4 = torch.nn.modules.activation.ReLU(inplace=True)
self.avgpool = torch.nn.modules.pooling.AdaptiveAvgPool2d(1)
self.fc = torch.nn.modules.linear.Linear(2048, 1000)
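
        # Classification head: global average pooling to 1x1 followed by a
        # 2048 -> 1000 fully connected layer (ImageNet-style logits).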
def forward(self, input_1):
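        """Flattened Res2Net-101 forward pass.

        `input_1` is expected to be an (N, 3, H, W) image batch; with the
        usual 224x224 crops the head produces (N, 1000) logits.
        """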
conv1 = self.conv1(input_1)
bn1 = self.bn1(conv1)
relu = self.relu(bn1)
maxpool = self.maxpool(relu)
layer1_0_conv1 = self.layer1_0_conv1(maxpool)
layer1_0_bn1 = self.layer1_0_bn1(layer1_0_conv1)
layer1_0_relu = self.layer1_0_relu(layer1_0_bn1)
split_1 = torch.split(layer1_0_relu, 26, 1)
layer1_0_convs_0 = self.layer1_0_convs_0(split_1[0])
layer1_0_bns_0 = self.layer1_0_bns_0(layer1_0_convs_0)
layer1_0_relu_1 = self.layer1_0_relu_1(layer1_0_bns_0)
layer1_0_convs_1 = self.layer1_0_convs_1(split_1[1])
layer1_0_bns_1 = self.layer1_0_bns_1(layer1_0_convs_1)
layer1_0_relu_2 = self.layer1_0_relu_2(layer1_0_bns_1)
cat_1 = torch.cat([layer1_0_relu_1, layer1_0_relu_2], 1)
layer1_0_convs_2 = self.layer1_0_convs_2(split_1[2])
layer1_0_bns_2 = self.layer1_0_bns_2(layer1_0_convs_2)
layer1_0_relu_3 = self.layer1_0_relu_3(layer1_0_bns_2)
cat_2 = torch.cat([cat_1, layer1_0_relu_3], 1)
layer1_0_pool = self.layer1_0_pool(split_1[3])
cat_3 = torch.cat([cat_2, layer1_0_pool], 1)
layer1_0_conv3 = self.layer1_0_conv3(cat_3)
layer1_0_bn3 = self.layer1_0_bn3(layer1_0_conv3)
layer1_0_downsample_0 = self.layer1_0_downsample_0(maxpool)
layer1_0_downsample_1 = self.layer1_0_downsample_1(layer1_0_downsample_0)
add_1 = layer1_0_bn3.__iadd__(layer1_0_downsample_1)
layer1_0_relu_4 = self.layer1_0_relu_4(add_1)
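
        # layer1_0 above: split_1 cuts the 104-channel activation into four
        # 26-channel groups; groups 0-2 each get their own conv/BN/ReLU,
        # group 3 only average pooling, and the concatenated result is
        # projected to 256 channels and added in place (__iadd__) onto the
        # downsampled identity before the final ReLU.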
layer1_1_conv1 = self.layer1_1_conv1(layer1_0_relu_4)
layer1_1_bn1 = self.layer1_1_bn1(layer1_1_conv1)
layer1_1_relu = self.layer1_1_relu(layer1_1_bn1)
split_2 = torch.split(layer1_1_relu, 26, 1)
layer1_1_convs_0 = self.layer1_1_convs_0(split_2[0])
layer1_1_bns_0 = self.layer1_1_bns_0(layer1_1_convs_0)
layer1_1_relu_1 = self.layer1_1_relu_1(layer1_1_bns_0)
add_2 = layer1_1_relu_1.__add__(split_2[1])
layer1_1_convs_1 = self.layer1_1_convs_1(add_2)
layer1_1_bns_1 = self.layer1_1_bns_1(layer1_1_convs_1)
layer1_1_relu_2 = self.layer1_1_relu_2(layer1_1_bns_1)
cat_4 = torch.cat([layer1_1_relu_1, layer1_1_relu_2], 1)
add_3 = layer1_1_relu_2.__add__(split_2[2])
layer1_1_convs_2 = self.layer1_1_convs_2(add_3)
layer1_1_bns_2 = self.layer1_1_bns_2(layer1_1_convs_2)
layer1_1_relu_3 = self.layer1_1_relu_3(layer1_1_bns_2)
cat_5 = torch.cat([cat_4, layer1_1_relu_3], 1)
cat_6 = torch.cat([cat_5, split_2[3]], 1)
layer1_1_conv3 = self.layer1_1_conv3(cat_6)
layer1_1_bn3 = self.layer1_1_bn3(layer1_1_conv3)
add_4 = layer1_1_bn3.__iadd__(layer1_0_relu_4)
layer1_1_relu_4 = self.layer1_1_relu_4(add_4)
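
        # layer1_1 above shows the regular (non-entry) Res2Net pattern: each
        # split after the first is summed with the previous branch output
        # before its 3x3 conv, which is what stacks receptive fields of
        # different scales, and the last split is concatenated unchanged.
        # A loop-style sketch of the same computation (illustrative
        # pseudocode only, not used by this module):
        #
        #     xs = torch.split(out, 26, dim=1)
        #     ys = []
        #     for i, (conv, bn, relu) in enumerate(branches):  # 3 branches
        #         sp = xs[i] if i == 0 else xs[i] + ys[-1]
        #         ys.append(relu(bn(conv(sp))))
        #     out = conv3(torch.cat(ys + [xs[3]], dim=1))
        #
        # Every remaining bottleneck in this forward pass repeats this
        # structure with its own attributes.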
layer1_2_conv1 = self.layer1_2_conv1(layer1_1_relu_4)
layer1_2_bn1 = self.layer1_2_bn1(layer1_2_conv1)
layer1_2_relu = self.layer1_2_relu(layer1_2_bn1)
split_3 = torch.split(layer1_2_relu, 26, 1)
layer1_2_convs_0 = self.layer1_2_convs_0(split_3[0])
layer1_2_bns_0 = self.layer1_2_bns_0(layer1_2_convs_0)
layer1_2_relu_1 = self.layer1_2_relu_1(layer1_2_bns_0)
add_5 = layer1_2_relu_1.__add__(split_3[1])
layer1_2_convs_1 = self.layer1_2_convs_1(add_5)
layer1_2_bns_1 = self.layer1_2_bns_1(layer1_2_convs_1)
layer1_2_relu_2 = self.layer1_2_relu_2(layer1_2_bns_1)
cat_7 = torch.cat([layer1_2_relu_1, layer1_2_relu_2], 1)
add_6 = layer1_2_relu_2.__add__(split_3[2])
layer1_2_convs_2 = self.layer1_2_convs_2(add_6)
layer1_2_bns_2 = self.layer1_2_bns_2(layer1_2_convs_2)
layer1_2_relu_3 = self.layer1_2_relu_3(layer1_2_bns_2)
cat_8 = torch.cat([cat_7, layer1_2_relu_3], 1)
cat_9 = torch.cat([cat_8, split_3[3]], 1)
layer1_2_conv3 = self.layer1_2_conv3(cat_9)
layer1_2_bn3 = self.layer1_2_bn3(layer1_2_conv3)
add_7 = layer1_2_bn3.__iadd__(layer1_1_relu_4)
layer1_2_relu_4 = self.layer1_2_relu_4(add_7)
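
        # End of stage 1: the tensor is 256 x 56 x 56 for a 224x224 input.
        # Stage 2 below halves the resolution and doubles the width.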
layer2_0_conv1 = self.layer2_0_conv1(layer1_2_relu_4)
layer2_0_bn1 = self.layer2_0_bn1(layer2_0_conv1)
layer2_0_relu = self.layer2_0_relu(layer2_0_bn1)
split_4 = torch.split(layer2_0_relu, 52, 1)
layer2_0_convs_0 = self.layer2_0_convs_0(split_4[0])
layer2_0_bns_0 = self.layer2_0_bns_0(layer2_0_convs_0)
layer2_0_relu_1 = self.layer2_0_relu_1(layer2_0_bns_0)
layer2_0_convs_1 = self.layer2_0_convs_1(split_4[1])
layer2_0_bns_1 = self.layer2_0_bns_1(layer2_0_convs_1)
layer2_0_relu_2 = self.layer2_0_relu_2(layer2_0_bns_1)
cat_10 = torch.cat([layer2_0_relu_1, layer2_0_relu_2], 1)
layer2_0_convs_2 = self.layer2_0_convs_2(split_4[2])
layer2_0_bns_2 = self.layer2_0_bns_2(layer2_0_convs_2)
layer2_0_relu_3 = self.layer2_0_relu_3(layer2_0_bns_2)
cat_11 = torch.cat([cat_10, layer2_0_relu_3], 1)
layer2_0_pool = self.layer2_0_pool(split_4[3])
cat_12 = torch.cat([cat_11, layer2_0_pool], 1)
layer2_0_conv3 = self.layer2_0_conv3(cat_12)
layer2_0_bn3 = self.layer2_0_bn3(layer2_0_conv3)
layer2_0_downsample_0 = self.layer2_0_downsample_0(layer1_2_relu_4)
layer2_0_downsample_1 = self.layer2_0_downsample_1(layer2_0_downsample_0)
add_8 = layer2_0_bn3.__iadd__(layer2_0_downsample_1)
layer2_0_relu_4 = self.layer2_0_relu_4(add_8)
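
        # In the stage-entry blocks (layer2_0, layer3_0, layer4_0) every split
        # is convolved independently with stride 2 and the last split goes
        # through the stride-2 AvgPool2d, so there is no hierarchical add:
        # the previous branch output would no longer match the split's
        # spatial size.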
layer2_1_conv1 = self.layer2_1_conv1(layer2_0_relu_4)
layer2_1_bn1 = self.layer2_1_bn1(layer2_1_conv1)
layer2_1_relu = self.layer2_1_relu(layer2_1_bn1)
split_5 = torch.split(layer2_1_relu, 52, 1)
layer2_1_convs_0 = self.layer2_1_convs_0(split_5[0])
layer2_1_bns_0 = self.layer2_1_bns_0(layer2_1_convs_0)
layer2_1_relu_1 = self.layer2_1_relu_1(layer2_1_bns_0)
add_9 = layer2_1_relu_1.__add__(split_5[1])
layer2_1_convs_1 = self.layer2_1_convs_1(add_9)
layer2_1_bns_1 = self.layer2_1_bns_1(layer2_1_convs_1)
layer2_1_relu_2 = self.layer2_1_relu_2(layer2_1_bns_1)
cat_13 = torch.cat([layer2_1_relu_1, layer2_1_relu_2], 1)
add_10 = layer2_1_relu_2.__add__(split_5[2])
layer2_1_convs_2 = self.layer2_1_convs_2(add_10)
layer2_1_bns_2 = self.layer2_1_bns_2(layer2_1_convs_2)
layer2_1_relu_3 = self.layer2_1_relu_3(layer2_1_bns_2)
cat_14 = torch.cat([cat_13, layer2_1_relu_3], 1)
cat_15 = torch.cat([cat_14, split_5[3]], 1)
layer2_1_conv3 = self.layer2_1_conv3(cat_15)
layer2_1_bn3 = self.layer2_1_bn3(layer2_1_conv3)
add_11 = layer2_1_bn3.__iadd__(layer2_0_relu_4)
layer2_1_relu_4 = self.layer2_1_relu_4(add_11)
layer2_2_conv1 = self.layer2_2_conv1(layer2_1_relu_4)
layer2_2_bn1 = self.layer2_2_bn1(layer2_2_conv1)
layer2_2_relu = self.layer2_2_relu(layer2_2_bn1)
split_6 = torch.split(layer2_2_relu, 52, 1)
layer2_2_convs_0 = self.layer2_2_convs_0(split_6[0])
layer2_2_bns_0 = self.layer2_2_bns_0(layer2_2_convs_0)
layer2_2_relu_1 = self.layer2_2_relu_1(layer2_2_bns_0)
add_12 = layer2_2_relu_1.__add__(split_6[1])
layer2_2_convs_1 = self.layer2_2_convs_1(add_12)
layer2_2_bns_1 = self.layer2_2_bns_1(layer2_2_convs_1)
layer2_2_relu_2 = self.layer2_2_relu_2(layer2_2_bns_1)
cat_16 = torch.cat([layer2_2_relu_1, layer2_2_relu_2], 1)
add_13 = layer2_2_relu_2.__add__(split_6[2])
layer2_2_convs_2 = self.layer2_2_convs_2(add_13)
layer2_2_bns_2 = self.layer2_2_bns_2(layer2_2_convs_2)
layer2_2_relu_3 = self.layer2_2_relu_3(layer2_2_bns_2)
cat_17 = torch.cat([cat_16, layer2_2_relu_3], 1)
cat_18 = torch.cat([cat_17, split_6[3]], 1)
layer2_2_conv3 = self.layer2_2_conv3(cat_18)
layer2_2_bn3 = self.layer2_2_bn3(layer2_2_conv3)
add_14 = layer2_2_bn3.__iadd__(layer2_1_relu_4)
layer2_2_relu_4 = self.layer2_2_relu_4(add_14)
layer2_3_conv1 = self.layer2_3_conv1(layer2_2_relu_4)
layer2_3_bn1 = self.layer2_3_bn1(layer2_3_conv1)
layer2_3_relu = self.layer2_3_relu(layer2_3_bn1)
split_7 = torch.split(layer2_3_relu, 52, 1)
layer2_3_convs_0 = self.layer2_3_convs_0(split_7[0])
layer2_3_bns_0 = self.layer2_3_bns_0(layer2_3_convs_0)
layer2_3_relu_1 = self.layer2_3_relu_1(layer2_3_bns_0)
add_15 = layer2_3_relu_1.__add__(split_7[1])
layer2_3_convs_1 = self.layer2_3_convs_1(add_15)
layer2_3_bns_1 = self.layer2_3_bns_1(layer2_3_convs_1)
layer2_3_relu_2 = self.layer2_3_relu_2(layer2_3_bns_1)
cat_19 = torch.cat([layer2_3_relu_1, layer2_3_relu_2], 1)
add_16 = layer2_3_relu_2.__add__(split_7[2])
layer2_3_convs_2 = self.layer2_3_convs_2(add_16)
layer2_3_bns_2 = self.layer2_3_bns_2(layer2_3_convs_2)
layer2_3_relu_3 = self.layer2_3_relu_3(layer2_3_bns_2)
cat_20 = torch.cat([cat_19, layer2_3_relu_3], 1)
cat_21 = torch.cat([cat_20, split_7[3]], 1)
layer2_3_conv3 = self.layer2_3_conv3(cat_21)
layer2_3_bn3 = self.layer2_3_bn3(layer2_3_conv3)
add_17 = layer2_3_bn3.__iadd__(layer2_2_relu_4)
layer2_3_relu_4 = self.layer2_3_relu_4(add_17)
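
        # End of stage 2 (512 x 28 x 28 for a 224x224 input); stage 3 below
        # runs its 23 bottlenecks at 14x14 resolution with 1024 channels.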
layer3_0_conv1 = self.layer3_0_conv1(layer2_3_relu_4)
layer3_0_bn1 = self.layer3_0_bn1(layer3_0_conv1)
layer3_0_relu = self.layer3_0_relu(layer3_0_bn1)
split_8 = torch.split(layer3_0_relu, 104, 1)
layer3_0_convs_0 = self.layer3_0_convs_0(split_8[0])
layer3_0_bns_0 = self.layer3_0_bns_0(layer3_0_convs_0)
layer3_0_relu_1 = self.layer3_0_relu_1(layer3_0_bns_0)
layer3_0_convs_1 = self.layer3_0_convs_1(split_8[1])
layer3_0_bns_1 = self.layer3_0_bns_1(layer3_0_convs_1)
layer3_0_relu_2 = self.layer3_0_relu_2(layer3_0_bns_1)
cat_22 = torch.cat([layer3_0_relu_1, layer3_0_relu_2], 1)
layer3_0_convs_2 = self.layer3_0_convs_2(split_8[2])
layer3_0_bns_2 = self.layer3_0_bns_2(layer3_0_convs_2)
layer3_0_relu_3 = self.layer3_0_relu_3(layer3_0_bns_2)
cat_23 = torch.cat([cat_22, layer3_0_relu_3], 1)
layer3_0_pool = self.layer3_0_pool(split_8[3])
cat_24 = torch.cat([cat_23, layer3_0_pool], 1)
layer3_0_conv3 = self.layer3_0_conv3(cat_24)
layer3_0_bn3 = self.layer3_0_bn3(layer3_0_conv3)
layer3_0_downsample_0 = self.layer3_0_downsample_0(layer2_3_relu_4)
layer3_0_downsample_1 = self.layer3_0_downsample_1(layer3_0_downsample_0)
add_18 = layer3_0_bn3.__iadd__(layer3_0_downsample_1)
layer3_0_relu_4 = self.layer3_0_relu_4(add_18)
layer3_1_conv1 = self.layer3_1_conv1(layer3_0_relu_4)
layer3_1_bn1 = self.layer3_1_bn1(layer3_1_conv1)
layer3_1_relu = self.layer3_1_relu(layer3_1_bn1)
split_9 = torch.split(layer3_1_relu, 104, 1)
layer3_1_convs_0 = self.layer3_1_convs_0(split_9[0])
layer3_1_bns_0 = self.layer3_1_bns_0(layer3_1_convs_0)
layer3_1_relu_1 = self.layer3_1_relu_1(layer3_1_bns_0)
add_19 = layer3_1_relu_1.__add__(split_9[1])
layer3_1_convs_1 = self.layer3_1_convs_1(add_19)
layer3_1_bns_1 = self.layer3_1_bns_1(layer3_1_convs_1)
layer3_1_relu_2 = self.layer3_1_relu_2(layer3_1_bns_1)
cat_25 = torch.cat([layer3_1_relu_1, layer3_1_relu_2], 1)
add_20 = layer3_1_relu_2.__add__(split_9[2])
layer3_1_convs_2 = self.layer3_1_convs_2(add_20)
layer3_1_bns_2 = self.layer3_1_bns_2(layer3_1_convs_2)
layer3_1_relu_3 = self.layer3_1_relu_3(layer3_1_bns_2)
cat_26 = torch.cat([cat_25, layer3_1_relu_3], 1)
cat_27 = torch.cat([cat_26, split_9[3]], 1)
layer3_1_conv3 = self.layer3_1_conv3(cat_27)
layer3_1_bn3 = self.layer3_1_bn3(layer3_1_conv3)
add_21 = layer3_1_bn3.__iadd__(layer3_0_relu_4)
layer3_1_relu_4 = self.layer3_1_relu_4(add_21)
layer3_2_conv1 = self.layer3_2_conv1(layer3_1_relu_4)
layer3_2_bn1 = self.layer3_2_bn1(layer3_2_conv1)
layer3_2_relu = self.layer3_2_relu(layer3_2_bn1)
split_10 = torch.split(layer3_2_relu, 104, 1)
layer3_2_convs_0 = self.layer3_2_convs_0(split_10[0])
layer3_2_bns_0 = self.layer3_2_bns_0(layer3_2_convs_0)
layer3_2_relu_1 = self.layer3_2_relu_1(layer3_2_bns_0)
add_22 = layer3_2_relu_1.__add__(split_10[1])
layer3_2_convs_1 = self.layer3_2_convs_1(add_22)
layer3_2_bns_1 = self.layer3_2_bns_1(layer3_2_convs_1)
layer3_2_relu_2 = self.layer3_2_relu_2(layer3_2_bns_1)
cat_28 = torch.cat([layer3_2_relu_1, layer3_2_relu_2], 1)
add_23 = layer3_2_relu_2.__add__(split_10[2])
layer3_2_convs_2 = self.layer3_2_convs_2(add_23)
layer3_2_bns_2 = self.layer3_2_bns_2(layer3_2_convs_2)
layer3_2_relu_3 = self.layer3_2_relu_3(layer3_2_bns_2)
cat_29 = torch.cat([cat_28, layer3_2_relu_3], 1)
cat_30 = torch.cat([cat_29, split_10[3]], 1)
layer3_2_conv3 = self.layer3_2_conv3(cat_30)
layer3_2_bn3 = self.layer3_2_bn3(layer3_2_conv3)
add_24 = layer3_2_bn3.__iadd__(layer3_1_relu_4)
layer3_2_relu_4 = self.layer3_2_relu_4(add_24)
layer3_3_conv1 = self.layer3_3_conv1(layer3_2_relu_4)
layer3_3_bn1 = self.layer3_3_bn1(layer3_3_conv1)
layer3_3_relu = self.layer3_3_relu(layer3_3_bn1)
split_11 = torch.split(layer3_3_relu, 104, 1)
layer3_3_convs_0 = self.layer3_3_convs_0(split_11[0])
layer3_3_bns_0 = self.layer3_3_bns_0(layer3_3_convs_0)
layer3_3_relu_1 = self.layer3_3_relu_1(layer3_3_bns_0)
add_25 = layer3_3_relu_1.__add__(split_11[1])
layer3_3_convs_1 = self.layer3_3_convs_1(add_25)
layer3_3_bns_1 = self.layer3_3_bns_1(layer3_3_convs_1)
layer3_3_relu_2 = self.layer3_3_relu_2(layer3_3_bns_1)
cat_31 = torch.cat([layer3_3_relu_1, layer3_3_relu_2], 1)
add_26 = layer3_3_relu_2.__add__(split_11[2])
layer3_3_convs_2 = self.layer3_3_convs_2(add_26)
layer3_3_bns_2 = self.layer3_3_bns_2(layer3_3_convs_2)
layer3_3_relu_3 = self.layer3_3_relu_3(layer3_3_bns_2)
cat_32 = torch.cat([cat_31, layer3_3_relu_3], 1)
cat_33 = torch.cat([cat_32, split_11[3]], 1)
layer3_3_conv3 = self.layer3_3_conv3(cat_33)
layer3_3_bn3 = self.layer3_3_bn3(layer3_3_conv3)
add_27 = layer3_3_bn3.__iadd__(layer3_2_relu_4)
layer3_3_relu_4 = self.layer3_3_relu_4(add_27)
layer3_4_conv1 = self.layer3_4_conv1(layer3_3_relu_4)
layer3_4_bn1 = self.layer3_4_bn1(layer3_4_conv1)
layer3_4_relu = self.layer3_4_relu(layer3_4_bn1)
split_12 = torch.split(layer3_4_relu, 104, 1)
layer3_4_convs_0 = self.layer3_4_convs_0(split_12[0])
layer3_4_bns_0 = self.layer3_4_bns_0(layer3_4_convs_0)
layer3_4_relu_1 = self.layer3_4_relu_1(layer3_4_bns_0)
add_28 = layer3_4_relu_1.__add__(split_12[1])
layer3_4_convs_1 = self.layer3_4_convs_1(add_28)
layer3_4_bns_1 = self.layer3_4_bns_1(layer3_4_convs_1)
layer3_4_relu_2 = self.layer3_4_relu_2(layer3_4_bns_1)
cat_34 = torch.cat([layer3_4_relu_1, layer3_4_relu_2], 1)
add_29 = layer3_4_relu_2.__add__(split_12[2])
layer3_4_convs_2 = self.layer3_4_convs_2(add_29)
layer3_4_bns_2 = self.layer3_4_bns_2(layer3_4_convs_2)
layer3_4_relu_3 = self.layer3_4_relu_3(layer3_4_bns_2)
cat_35 = torch.cat([cat_34, layer3_4_relu_3], 1)
cat_36 = torch.cat([cat_35, split_12[3]], 1)
layer3_4_conv3 = self.layer3_4_conv3(cat_36)
layer3_4_bn3 = self.layer3_4_bn3(layer3_4_conv3)
add_30 = layer3_4_bn3.__iadd__(layer3_3_relu_4)
layer3_4_relu_4 = self.layer3_4_relu_4(add_30)
layer3_5_conv1 = self.layer3_5_conv1(layer3_4_relu_4)
layer3_5_bn1 = self.layer3_5_bn1(layer3_5_conv1)
layer3_5_relu = self.layer3_5_relu(layer3_5_bn1)
split_13 = torch.split(layer3_5_relu, 104, 1)
layer3_5_convs_0 = self.layer3_5_convs_0(split_13[0])
layer3_5_bns_0 = self.layer3_5_bns_0(layer3_5_convs_0)
layer3_5_relu_1 = self.layer3_5_relu_1(layer3_5_bns_0)
add_31 = layer3_5_relu_1.__add__(split_13[1])
layer3_5_convs_1 = self.layer3_5_convs_1(add_31)
layer3_5_bns_1 = self.layer3_5_bns_1(layer3_5_convs_1)
layer3_5_relu_2 = self.layer3_5_relu_2(layer3_5_bns_1)
cat_37 = torch.cat([layer3_5_relu_1, layer3_5_relu_2], 1)
add_32 = layer3_5_relu_2.__add__(split_13[2])
layer3_5_convs_2 = self.layer3_5_convs_2(add_32)
layer3_5_bns_2 = self.layer3_5_bns_2(layer3_5_convs_2)
layer3_5_relu_3 = self.layer3_5_relu_3(layer3_5_bns_2)
cat_38 = torch.cat([cat_37, layer3_5_relu_3], 1)
cat_39 = torch.cat([cat_38, split_13[3]], 1)
layer3_5_conv3 = self.layer3_5_conv3(cat_39)
layer3_5_bn3 = self.layer3_5_bn3(layer3_5_conv3)
add_33 = layer3_5_bn3.__iadd__(layer3_4_relu_4)
layer3_5_relu_4 = self.layer3_5_relu_4(add_33)
layer3_6_conv1 = self.layer3_6_conv1(layer3_5_relu_4)
layer3_6_bn1 = self.layer3_6_bn1(layer3_6_conv1)
layer3_6_relu = self.layer3_6_relu(layer3_6_bn1)
split_14 = torch.split(layer3_6_relu, 104, 1)
layer3_6_convs_0 = self.layer3_6_convs_0(split_14[0])
layer3_6_bns_0 = self.layer3_6_bns_0(layer3_6_convs_0)
layer3_6_relu_1 = self.layer3_6_relu_1(layer3_6_bns_0)
add_34 = layer3_6_relu_1.__add__(split_14[1])
layer3_6_convs_1 = self.layer3_6_convs_1(add_34)
layer3_6_bns_1 = self.layer3_6_bns_1(layer3_6_convs_1)
layer3_6_relu_2 = self.layer3_6_relu_2(layer3_6_bns_1)
cat_40 = torch.cat([layer3_6_relu_1, layer3_6_relu_2], 1)
add_35 = layer3_6_relu_2.__add__(split_14[2])
layer3_6_convs_2 = self.layer3_6_convs_2(add_35)
layer3_6_bns_2 = self.layer3_6_bns_2(layer3_6_convs_2)
layer3_6_relu_3 = self.layer3_6_relu_3(layer3_6_bns_2)
cat_41 = torch.cat([cat_40, layer3_6_relu_3], 1)
cat_42 = torch.cat([cat_41, split_14[3]], 1)
layer3_6_conv3 = self.layer3_6_conv3(cat_42)
layer3_6_bn3 = self.layer3_6_bn3(layer3_6_conv3)
add_36 = layer3_6_bn3.__iadd__(layer3_5_relu_4)
layer3_6_relu_4 = self.layer3_6_relu_4(add_36)
layer3_7_conv1 = self.layer3_7_conv1(layer3_6_relu_4)
layer3_7_bn1 = self.layer3_7_bn1(layer3_7_conv1)
layer3_7_relu = self.layer3_7_relu(layer3_7_bn1)
split_15 = torch.split(layer3_7_relu, 104, 1)
layer3_7_convs_0 = self.layer3_7_convs_0(split_15[0])
layer3_7_bns_0 = self.layer3_7_bns_0(layer3_7_convs_0)
layer3_7_relu_1 = self.layer3_7_relu_1(layer3_7_bns_0)
add_37 = layer3_7_relu_1.__add__(split_15[1])
layer3_7_convs_1 = self.layer3_7_convs_1(add_37)
layer3_7_bns_1 = self.layer3_7_bns_1(layer3_7_convs_1)
layer3_7_relu_2 = self.layer3_7_relu_2(layer3_7_bns_1)
cat_43 = torch.cat([layer3_7_relu_1, layer3_7_relu_2], 1)
add_38 = layer3_7_relu_2.__add__(split_15[2])
layer3_7_convs_2 = self.layer3_7_convs_2(add_38)
layer3_7_bns_2 = self.layer3_7_bns_2(layer3_7_convs_2)
layer3_7_relu_3 = self.layer3_7_relu_3(layer3_7_bns_2)
cat_44 = torch.cat([cat_43, layer3_7_relu_3], 1)
cat_45 = torch.cat([cat_44, split_15[3]], 1)
layer3_7_conv3 = self.layer3_7_conv3(cat_45)
layer3_7_bn3 = self.layer3_7_bn3(layer3_7_conv3)
add_39 = layer3_7_bn3.__iadd__(layer3_6_relu_4)
layer3_7_relu_4 = self.layer3_7_relu_4(add_39)
layer3_8_conv1 = self.layer3_8_conv1(layer3_7_relu_4)
layer3_8_bn1 = self.layer3_8_bn1(layer3_8_conv1)
layer3_8_relu = self.layer3_8_relu(layer3_8_bn1)
split_16 = torch.split(layer3_8_relu, 104, 1)
layer3_8_convs_0 = self.layer3_8_convs_0(split_16[0])
layer3_8_bns_0 = self.layer3_8_bns_0(layer3_8_convs_0)
layer3_8_relu_1 = self.layer3_8_relu_1(layer3_8_bns_0)
add_40 = layer3_8_relu_1.__add__(split_16[1])
layer3_8_convs_1 = self.layer3_8_convs_1(add_40)
layer3_8_bns_1 = self.layer3_8_bns_1(layer3_8_convs_1)
layer3_8_relu_2 = self.layer3_8_relu_2(layer3_8_bns_1)
cat_46 = torch.cat([layer3_8_relu_1, layer3_8_relu_2], 1)
add_41 = layer3_8_relu_2.__add__(split_16[2])
layer3_8_convs_2 = self.layer3_8_convs_2(add_41)
layer3_8_bns_2 = self.layer3_8_bns_2(layer3_8_convs_2)
layer3_8_relu_3 = self.layer3_8_relu_3(layer3_8_bns_2)
cat_47 = torch.cat([cat_46, layer3_8_relu_3], 1)
cat_48 = torch.cat([cat_47, split_16[3]], 1)
layer3_8_conv3 = self.layer3_8_conv3(cat_48)
layer3_8_bn3 = self.layer3_8_bn3(layer3_8_conv3)
add_42 = layer3_8_bn3.__iadd__(layer3_7_relu_4)
layer3_8_relu_4 = self.layer3_8_relu_4(add_42)
layer3_9_conv1 = self.layer3_9_conv1(layer3_8_relu_4)
layer3_9_bn1 = self.layer3_9_bn1(layer3_9_conv1)
layer3_9_relu = self.layer3_9_relu(layer3_9_bn1)
split_17 = torch.split(layer3_9_relu, 104, 1)
layer3_9_convs_0 = self.layer3_9_convs_0(split_17[0])
layer3_9_bns_0 = self.layer3_9_bns_0(layer3_9_convs_0)
layer3_9_relu_1 = self.layer3_9_relu_1(layer3_9_bns_0)
add_43 = layer3_9_relu_1.__add__(split_17[1])
layer3_9_convs_1 = self.layer3_9_convs_1(add_43)
layer3_9_bns_1 = self.layer3_9_bns_1(layer3_9_convs_1)
layer3_9_relu_2 = self.layer3_9_relu_2(layer3_9_bns_1)
cat_49 = torch.cat([layer3_9_relu_1, layer3_9_relu_2], 1)
add_44 = layer3_9_relu_2.__add__(split_17[2])
layer3_9_convs_2 = self.layer3_9_convs_2(add_44)
layer3_9_bns_2 = self.layer3_9_bns_2(layer3_9_convs_2)
layer3_9_relu_3 = self.layer3_9_relu_3(layer3_9_bns_2)
cat_50 = torch.cat([cat_49, layer3_9_relu_3], 1)
cat_51 = torch.cat([cat_50, split_17[3]], 1)
layer3_9_conv3 = self.layer3_9_conv3(cat_51)
layer3_9_bn3 = self.layer3_9_bn3(layer3_9_conv3)
add_45 = layer3_9_bn3.__iadd__(layer3_8_relu_4)
layer3_9_relu_4 = self.layer3_9_relu_4(add_45)
layer3_10_conv1 = self.layer3_10_conv1(layer3_9_relu_4)
layer3_10_bn1 = self.layer3_10_bn1(layer3_10_conv1)
layer3_10_relu = self.layer3_10_relu(layer3_10_bn1)
split_18 = torch.split(layer3_10_relu, 104, 1)
layer3_10_convs_0 = self.layer3_10_convs_0(split_18[0])
layer3_10_bns_0 = self.layer3_10_bns_0(layer3_10_convs_0)
layer3_10_relu_1 = self.layer3_10_relu_1(layer3_10_bns_0)
add_46 = layer3_10_relu_1.__add__(split_18[1])
layer3_10_convs_1 = self.layer3_10_convs_1(add_46)
layer3_10_bns_1 = self.layer3_10_bns_1(layer3_10_convs_1)
layer3_10_relu_2 = self.layer3_10_relu_2(layer3_10_bns_1)
cat_52 = torch.cat([layer3_10_relu_1, layer3_10_relu_2], 1)
add_47 = layer3_10_relu_2.__add__(split_18[2])
layer3_10_convs_2 = self.layer3_10_convs_2(add_47)
layer3_10_bns_2 = self.layer3_10_bns_2(layer3_10_convs_2)
layer3_10_relu_3 = self.layer3_10_relu_3(layer3_10_bns_2)
cat_53 = torch.cat([cat_52, layer3_10_relu_3], 1)
cat_54 = torch.cat([cat_53, split_18[3]], 1)
layer3_10_conv3 = self.layer3_10_conv3(cat_54)
layer3_10_bn3 = self.layer3_10_bn3(layer3_10_conv3)
add_48 = layer3_10_bn3.__iadd__(layer3_9_relu_4)
layer3_10_relu_4 = self.layer3_10_relu_4(add_48)
layer3_11_conv1 = self.layer3_11_conv1(layer3_10_relu_4)
layer3_11_bn1 = self.layer3_11_bn1(layer3_11_conv1)
layer3_11_relu = self.layer3_11_relu(layer3_11_bn1)
split_19 = torch.split(layer3_11_relu, 104, 1)
layer3_11_convs_0 = self.layer3_11_convs_0(split_19[0])
layer3_11_bns_0 = self.layer3_11_bns_0(layer3_11_convs_0)
layer3_11_relu_1 = self.layer3_11_relu_1(layer3_11_bns_0)
add_49 = layer3_11_relu_1.__add__(split_19[1])
layer3_11_convs_1 = self.layer3_11_convs_1(add_49)
layer3_11_bns_1 = self.layer3_11_bns_1(layer3_11_convs_1)
layer3_11_relu_2 = self.layer3_11_relu_2(layer3_11_bns_1)
cat_55 = torch.cat([layer3_11_relu_1, layer3_11_relu_2], 1)
add_50 = layer3_11_relu_2.__add__(split_19[2])
layer3_11_convs_2 = self.layer3_11_convs_2(add_50)
layer3_11_bns_2 = self.layer3_11_bns_2(layer3_11_convs_2)
layer3_11_relu_3 = self.layer3_11_relu_3(layer3_11_bns_2)
cat_56 = torch.cat([cat_55, layer3_11_relu_3], 1)
cat_57 = torch.cat([cat_56, split_19[3]], 1)
layer3_11_conv3 = self.layer3_11_conv3(cat_57)
layer3_11_bn3 = self.layer3_11_bn3(layer3_11_conv3)
add_51 = layer3_11_bn3.__iadd__(layer3_10_relu_4)
layer3_11_relu_4 = self.layer3_11_relu_4(add_51)
layer3_12_conv1 = self.layer3_12_conv1(layer3_11_relu_4)
layer3_12_bn1 = self.layer3_12_bn1(layer3_12_conv1)
layer3_12_relu = self.layer3_12_relu(layer3_12_bn1)
split_20 = torch.split(layer3_12_relu, 104, 1)
layer3_12_convs_0 = self.layer3_12_convs_0(split_20[0])
layer3_12_bns_0 = self.layer3_12_bns_0(layer3_12_convs_0)
layer3_12_relu_1 = self.layer3_12_relu_1(layer3_12_bns_0)
add_52 = layer3_12_relu_1.__add__(split_20[1])
layer3_12_convs_1 = self.layer3_12_convs_1(add_52)
layer3_12_bns_1 = self.layer3_12_bns_1(layer3_12_convs_1)
layer3_12_relu_2 = self.layer3_12_relu_2(layer3_12_bns_1)
cat_58 = torch.cat([layer3_12_relu_1, layer3_12_relu_2], 1)
add_53 = layer3_12_relu_2.__add__(split_20[2])
layer3_12_convs_2 = self.layer3_12_convs_2(add_53)
layer3_12_bns_2 = self.layer3_12_bns_2(layer3_12_convs_2)
layer3_12_relu_3 = self.layer3_12_relu_3(layer3_12_bns_2)
cat_59 = torch.cat([cat_58, layer3_12_relu_3], 1)
cat_60 = torch.cat([cat_59, split_20[3]], 1)
layer3_12_conv3 = self.layer3_12_conv3(cat_60)
layer3_12_bn3 = self.layer3_12_bn3(layer3_12_conv3)
add_54 = layer3_12_bn3.__iadd__(layer3_11_relu_4)
layer3_12_relu_4 = self.layer3_12_relu_4(add_54)
layer3_13_conv1 = self.layer3_13_conv1(layer3_12_relu_4)
layer3_13_bn1 = self.layer3_13_bn1(layer3_13_conv1)
layer3_13_relu = self.layer3_13_relu(layer3_13_bn1)
split_21 = torch.split(layer3_13_relu, 104, 1)
layer3_13_convs_0 = self.layer3_13_convs_0(split_21[0])
layer3_13_bns_0 = self.layer3_13_bns_0(layer3_13_convs_0)
layer3_13_relu_1 = self.layer3_13_relu_1(layer3_13_bns_0)
add_55 = layer3_13_relu_1.__add__(split_21[1])
layer3_13_convs_1 = self.layer3_13_convs_1(add_55)
layer3_13_bns_1 = self.layer3_13_bns_1(layer3_13_convs_1)
layer3_13_relu_2 = self.layer3_13_relu_2(layer3_13_bns_1)
cat_61 = torch.cat([layer3_13_relu_1, layer3_13_relu_2], 1)
add_56 = layer3_13_relu_2.__add__(split_21[2])
layer3_13_convs_2 = self.layer3_13_convs_2(add_56)
layer3_13_bns_2 = self.layer3_13_bns_2(layer3_13_convs_2)
layer3_13_relu_3 = self.layer3_13_relu_3(layer3_13_bns_2)
cat_62 = torch.cat([cat_61, layer3_13_relu_3], 1)
cat_63 = torch.cat([cat_62, split_21[3]], 1)
layer3_13_conv3 = self.layer3_13_conv3(cat_63)
layer3_13_bn3 = self.layer3_13_bn3(layer3_13_conv3)
add_57 = layer3_13_bn3.__iadd__(layer3_12_relu_4)
layer3_13_relu_4 = self.layer3_13_relu_4(add_57)
layer3_14_conv1 = self.layer3_14_conv1(layer3_13_relu_4)
layer3_14_bn1 = self.layer3_14_bn1(layer3_14_conv1)
layer3_14_relu = self.layer3_14_relu(layer3_14_bn1)
split_22 = torch.split(layer3_14_relu, 104, 1)
layer3_14_convs_0 = self.layer3_14_convs_0(split_22[0])
layer3_14_bns_0 = self.layer3_14_bns_0(layer3_14_convs_0)
layer3_14_relu_1 = self.layer3_14_relu_1(layer3_14_bns_0)
add_58 = layer3_14_relu_1.__add__(split_22[1])
layer3_14_convs_1 = self.layer3_14_convs_1(add_58)
layer3_14_bns_1 = self.layer3_14_bns_1(layer3_14_convs_1)
layer3_14_relu_2 = self.layer3_14_relu_2(layer3_14_bns_1)
cat_64 = torch.cat([layer3_14_relu_1, layer3_14_relu_2], 1)
add_59 = layer3_14_relu_2.__add__(split_22[2])
layer3_14_convs_2 = self.layer3_14_convs_2(add_59)
layer3_14_bns_2 = self.layer3_14_bns_2(layer3_14_convs_2)
layer3_14_relu_3 = self.layer3_14_relu_3(layer3_14_bns_2)
cat_65 = torch.cat([cat_64, layer3_14_relu_3], 1)
cat_66 = torch.cat([cat_65, split_22[3]], 1)
layer3_14_conv3 = self.layer3_14_conv3(cat_66)
layer3_14_bn3 = self.layer3_14_bn3(layer3_14_conv3)
add_60 = layer3_14_bn3.__iadd__(layer3_13_relu_4)
layer3_14_relu_4 = self.layer3_14_relu_4(add_60)
layer3_15_conv1 = self.layer3_15_conv1(layer3_14_relu_4)
layer3_15_bn1 = self.layer3_15_bn1(layer3_15_conv1)
layer3_15_relu = self.layer3_15_relu(layer3_15_bn1)
split_23 = torch.split(layer3_15_relu, 104, 1)
layer3_15_convs_0 = self.layer3_15_convs_0(split_23[0])
layer3_15_bns_0 = self.layer3_15_bns_0(layer3_15_convs_0)
layer3_15_relu_1 = self.layer3_15_relu_1(layer3_15_bns_0)
add_61 = layer3_15_relu_1.__add__(split_23[1])
layer3_15_convs_1 = self.layer3_15_convs_1(add_61)
layer3_15_bns_1 = self.layer3_15_bns_1(layer3_15_convs_1)
layer3_15_relu_2 = self.layer3_15_relu_2(layer3_15_bns_1)
cat_67 = torch.cat([layer3_15_relu_1, layer3_15_relu_2], 1)
add_62 = layer3_15_relu_2.__add__(split_23[2])
layer3_15_convs_2 = self.layer3_15_convs_2(add_62)
layer3_15_bns_2 = self.layer3_15_bns_2(layer3_15_convs_2)
layer3_15_relu_3 = self.layer3_15_relu_3(layer3_15_bns_2)
cat_68 = torch.cat([cat_67, layer3_15_relu_3], 1)
cat_69 = torch.cat([cat_68, split_23[3]], 1)
layer3_15_conv3 = self.layer3_15_conv3(cat_69)
layer3_15_bn3 = self.layer3_15_bn3(layer3_15_conv3)
add_63 = layer3_15_bn3.__iadd__(layer3_14_relu_4)
layer3_15_relu_4 = self.layer3_15_relu_4(add_63)
layer3_16_conv1 = self.layer3_16_conv1(layer3_15_relu_4)
layer3_16_bn1 = self.layer3_16_bn1(layer3_16_conv1)
layer3_16_relu = self.layer3_16_relu(layer3_16_bn1)
split_24 = torch.split(layer3_16_relu, 104, 1)
layer3_16_convs_0 = self.layer3_16_convs_0(split_24[0])
layer3_16_bns_0 = self.layer3_16_bns_0(layer3_16_convs_0)
layer3_16_relu_1 = self.layer3_16_relu_1(layer3_16_bns_0)
add_64 = layer3_16_relu_1.__add__(split_24[1])
layer3_16_convs_1 = self.layer3_16_convs_1(add_64)
layer3_16_bns_1 = self.layer3_16_bns_1(layer3_16_convs_1)
layer3_16_relu_2 = self.layer3_16_relu_2(layer3_16_bns_1)
cat_70 = torch.cat([layer3_16_relu_1, layer3_16_relu_2], 1)
add_65 = layer3_16_relu_2.__add__(split_24[2])
layer3_16_convs_2 = self.layer3_16_convs_2(add_65)
layer3_16_bns_2 = self.layer3_16_bns_2(layer3_16_convs_2)
layer3_16_relu_3 = self.layer3_16_relu_3(layer3_16_bns_2)
cat_71 = torch.cat([cat_70, layer3_16_relu_3], 1)
cat_72 = torch.cat([cat_71, split_24[3]], 1)
layer3_16_conv3 = self.layer3_16_conv3(cat_72)
layer3_16_bn3 = self.layer3_16_bn3(layer3_16_conv3)
add_66 = layer3_16_bn3.__iadd__(layer3_15_relu_4)
layer3_16_relu_4 = self.layer3_16_relu_4(add_66)
layer3_17_conv1 = self.layer3_17_conv1(layer3_16_relu_4)
layer3_17_bn1 = self.layer3_17_bn1(layer3_17_conv1)
layer3_17_relu = self.layer3_17_relu(layer3_17_bn1)
split_25 = torch.split(layer3_17_relu, 104, 1)
layer3_17_convs_0 = self.layer3_17_convs_0(split_25[0])
layer3_17_bns_0 = self.layer3_17_bns_0(layer3_17_convs_0)
layer3_17_relu_1 = self.layer3_17_relu_1(layer3_17_bns_0)
add_67 = layer3_17_relu_1.__add__(split_25[1])
layer3_17_convs_1 = self.layer3_17_convs_1(add_67)
layer3_17_bns_1 = self.layer3_17_bns_1(layer3_17_convs_1)
layer3_17_relu_2 = self.layer3_17_relu_2(layer3_17_bns_1)
cat_73 = torch.cat([layer3_17_relu_1, layer3_17_relu_2], 1)
add_68 = layer3_17_relu_2.__add__(split_25[2])
layer3_17_convs_2 = self.layer3_17_convs_2(add_68)
layer3_17_bns_2 = self.layer3_17_bns_2(layer3_17_convs_2)
layer3_17_relu_3 = self.layer3_17_relu_3(layer3_17_bns_2)
cat_74 = torch.cat([cat_73, layer3_17_relu_3], 1)
cat_75 = torch.cat([cat_74, split_25[3]], 1)
layer3_17_conv3 = self.layer3_17_conv3(cat_75)
layer3_17_bn3 = self.layer3_17_bn3(layer3_17_conv3)
add_69 = layer3_17_bn3.__iadd__(layer3_16_relu_4)
layer3_17_relu_4 = self.layer3_17_relu_4(add_69)
layer3_18_conv1 = self.layer3_18_conv1(layer3_17_relu_4)
layer3_18_bn1 = self.layer3_18_bn1(layer3_18_conv1)
layer3_18_relu = self.layer3_18_relu(layer3_18_bn1)
split_26 = torch.split(layer3_18_relu, 104, 1)
layer3_18_convs_0 = self.layer3_18_convs_0(split_26[0])
layer3_18_bns_0 = self.layer3_18_bns_0(layer3_18_convs_0)
layer3_18_relu_1 = self.layer3_18_relu_1(layer3_18_bns_0)
add_70 = layer3_18_relu_1.__add__(split_26[1])
layer3_18_convs_1 = self.layer3_18_convs_1(add_70)
layer3_18_bns_1 = self.layer3_18_bns_1(layer3_18_convs_1)
layer3_18_relu_2 = self.layer3_18_relu_2(layer3_18_bns_1)
cat_76 = torch.cat([layer3_18_relu_1, layer3_18_relu_2], 1)
add_71 = layer3_18_relu_2.__add__(split_26[2])
layer3_18_convs_2 = self.layer3_18_convs_2(add_71)
layer3_18_bns_2 = self.layer3_18_bns_2(layer3_18_convs_2)
layer3_18_relu_3 = self.layer3_18_relu_3(layer3_18_bns_2)
cat_77 = torch.cat([cat_76, layer3_18_relu_3], 1)
cat_78 = torch.cat([cat_77, split_26[3]], 1)
layer3_18_conv3 = self.layer3_18_conv3(cat_78)
layer3_18_bn3 = self.layer3_18_bn3(layer3_18_conv3)
add_72 = layer3_18_bn3.__iadd__(layer3_17_relu_4)
layer3_18_relu_4 = self.layer3_18_relu_4(add_72)
layer3_19_conv1 = self.layer3_19_conv1(layer3_18_relu_4)
layer3_19_bn1 = self.layer3_19_bn1(layer3_19_conv1)
layer3_19_relu = self.layer3_19_relu(layer3_19_bn1)
split_27 = torch.split(layer3_19_relu, 104, 1)
layer3_19_convs_0 = self.layer3_19_convs_0(split_27[0])
layer3_19_bns_0 = self.layer3_19_bns_0(layer3_19_convs_0)
layer3_19_relu_1 = self.layer3_19_relu_1(layer3_19_bns_0)
add_73 = layer3_19_relu_1.__add__(split_27[1])
layer3_19_convs_1 = self.layer3_19_convs_1(add_73)
layer3_19_bns_1 = self.layer3_19_bns_1(layer3_19_convs_1)
layer3_19_relu_2 = self.layer3_19_relu_2(layer3_19_bns_1)
cat_79 = torch.cat([layer3_19_relu_1, layer3_19_relu_2], 1)
add_74 = layer3_19_relu_2.__add__(split_27[2])
layer3_19_convs_2 = self.layer3_19_convs_2(add_74)
layer3_19_bns_2 = self.layer3_19_bns_2(layer3_19_convs_2)
layer3_19_relu_3 = self.layer3_19_relu_3(layer3_19_bns_2)
cat_80 = torch.cat([cat_79, layer3_19_relu_3], 1)
cat_81 = torch.cat([cat_80, split_27[3]], 1)
layer3_19_conv3 = self.layer3_19_conv3(cat_81)
layer3_19_bn3 = self.layer3_19_bn3(layer3_19_conv3)
add_75 = layer3_19_bn3.__iadd__(layer3_18_relu_4)
layer3_19_relu_4 = self.layer3_19_relu_4(add_75)
layer3_20_conv1 = self.layer3_20_conv1(layer3_19_relu_4)
layer3_20_bn1 = self.layer3_20_bn1(layer3_20_conv1)
layer3_20_relu = self.layer3_20_relu(layer3_20_bn1)
split_28 = torch.split(layer3_20_relu, 104, 1)
layer3_20_convs_0 = self.layer3_20_convs_0(split_28[0])
layer3_20_bns_0 = self.layer3_20_bns_0(layer3_20_convs_0)
layer3_20_relu_1 = self.layer3_20_relu_1(layer3_20_bns_0)
add_76 = layer3_20_relu_1.__add__(split_28[1])
layer3_20_convs_1 = self.layer3_20_convs_1(add_76)
layer3_20_bns_1 = self.layer3_20_bns_1(layer3_20_convs_1)
layer3_20_relu_2 = self.layer3_20_relu_2(layer3_20_bns_1)
cat_82 = torch.cat([layer3_20_relu_1, layer3_20_relu_2], 1)
add_77 = layer3_20_relu_2.__add__(split_28[2])
layer3_20_convs_2 = self.layer3_20_convs_2(add_77)
layer3_20_bns_2 = self.layer3_20_bns_2(layer3_20_convs_2)
layer3_20_relu_3 = self.layer3_20_relu_3(layer3_20_bns_2)
cat_83 = torch.cat([cat_82, layer3_20_relu_3], 1)
cat_84 = torch.cat([cat_83, split_28[3]], 1)
layer3_20_conv3 = self.layer3_20_conv3(cat_84)
layer3_20_bn3 = self.layer3_20_bn3(layer3_20_conv3)
add_78 = layer3_20_bn3.__iadd__(layer3_19_relu_4)
layer3_20_relu_4 = self.layer3_20_relu_4(add_78)
layer3_21_conv1 = self.layer3_21_conv1(layer3_20_relu_4)
layer3_21_bn1 = self.layer3_21_bn1(layer3_21_conv1)
layer3_21_relu = self.layer3_21_relu(layer3_21_bn1)
split_29 = torch.split(layer3_21_relu, 104, 1)
layer3_21_convs_0 = self.layer3_21_convs_0(split_29[0])
layer3_21_bns_0 = self.layer3_21_bns_0(layer3_21_convs_0)
layer3_21_relu_1 = self.layer3_21_relu_1(layer3_21_bns_0)
add_79 = layer3_21_relu_1.__add__(split_29[1])
layer3_21_convs_1 = self.layer3_21_convs_1(add_79)
layer3_21_bns_1 = self.layer3_21_bns_1(layer3_21_convs_1)
layer3_21_relu_2 = self.layer3_21_relu_2(layer3_21_bns_1)
cat_85 = torch.cat([layer3_21_relu_1, layer3_21_relu_2], 1)
add_80 = layer3_21_relu_2.__add__(split_29[2])
layer3_21_convs_2 = self.layer3_21_convs_2(add_80)
layer3_21_bns_2 = self.layer3_21_bns_2(layer3_21_convs_2)
layer3_21_relu_3 = self.layer3_21_relu_3(layer3_21_bns_2)
cat_86 = torch.cat([cat_85, layer3_21_relu_3], 1)
cat_87 = torch.cat([cat_86, split_29[3]], 1)
layer3_21_conv3 = self.layer3_21_conv3(cat_87)
layer3_21_bn3 = self.layer3_21_bn3(layer3_21_conv3)
add_81 = layer3_21_bn3.__iadd__(layer3_20_relu_4)
layer3_21_relu_4 = self.layer3_21_relu_4(add_81)
layer3_22_conv1 = self.layer3_22_conv1(layer3_21_relu_4)
layer3_22_bn1 = self.layer3_22_bn1(layer3_22_conv1)
layer3_22_relu = self.layer3_22_relu(layer3_22_bn1)
split_30 = torch.split(layer3_22_relu, 104, 1)
layer3_22_convs_0 = self.layer3_22_convs_0(split_30[0])
layer3_22_bns_0 = self.layer3_22_bns_0(layer3_22_convs_0)
layer3_22_relu_1 = self.layer3_22_relu_1(layer3_22_bns_0)
add_82 = layer3_22_relu_1.__add__(split_30[1])
layer3_22_convs_1 = self.layer3_22_convs_1(add_82)
layer3_22_bns_1 = self.layer3_22_bns_1(layer3_22_convs_1)
layer3_22_relu_2 = self.layer3_22_relu_2(layer3_22_bns_1)
cat_88 = torch.cat([layer3_22_relu_1, layer3_22_relu_2], 1)
add_83 = layer3_22_relu_2.__add__(split_30[2])
layer3_22_convs_2 = self.layer3_22_convs_2(add_83)
layer3_22_bns_2 = self.layer3_22_bns_2(layer3_22_convs_2)
layer3_22_relu_3 = self.layer3_22_relu_3(layer3_22_bns_2)
cat_89 = torch.cat([cat_88, layer3_22_relu_3], 1)
cat_90 = torch.cat([cat_89, split_30[3]], 1)
layer3_22_conv3 = self.layer3_22_conv3(cat_90)
layer3_22_bn3 = self.layer3_22_bn3(layer3_22_conv3)
add_84 = layer3_22_bn3.__iadd__(layer3_21_relu_4)
layer3_22_relu_4 = self.layer3_22_relu_4(add_84)
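# layer4: 3 Res2Net bottleneck blocks with 208-channel splits; the first block again
# uses an average-pool branch for the last split and a downsample on the identity path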
layer4_0_conv1 = self.layer4_0_conv1(layer3_22_relu_4)
layer4_0_bn1 = self.layer4_0_bn1(layer4_0_conv1)
layer4_0_relu = self.layer4_0_relu(layer4_0_bn1)
split_31 = torch.split(layer4_0_relu, 208, 1)
layer4_0_convs_0 = self.layer4_0_convs_0(split_31[0])
layer4_0_bns_0 = self.layer4_0_bns_0(layer4_0_convs_0)
layer4_0_relu_1 = self.layer4_0_relu_1(layer4_0_bns_0)
layer4_0_convs_1 = self.layer4_0_convs_1(split_31[1])
layer4_0_bns_1 = self.layer4_0_bns_1(layer4_0_convs_1)
layer4_0_relu_2 = self.layer4_0_relu_2(layer4_0_bns_1)
cat_91 = torch.cat([layer4_0_relu_1, layer4_0_relu_2], 1)
layer4_0_convs_2 = self.layer4_0_convs_2(split_31[2])
layer4_0_bns_2 = self.layer4_0_bns_2(layer4_0_convs_2)
layer4_0_relu_3 = self.layer4_0_relu_3(layer4_0_bns_2)
cat_92 = torch.cat([cat_91, layer4_0_relu_3], 1)
layer4_0_pool = self.layer4_0_pool(split_31[3])
cat_93 = torch.cat([cat_92, layer4_0_pool], 1)
layer4_0_conv3 = self.layer4_0_conv3(cat_93)
layer4_0_bn3 = self.layer4_0_bn3(layer4_0_conv3)
layer4_0_downsample_0 = self.layer4_0_downsample_0(layer3_22_relu_4)
layer4_0_downsample_1 = self.layer4_0_downsample_1(layer4_0_downsample_0)
add_85 = layer4_0_bn3.__iadd__(layer4_0_downsample_1)
layer4_0_relu_4 = self.layer4_0_relu_4(add_85)
layer4_1_conv1 = self.layer4_1_conv1(layer4_0_relu_4)
layer4_1_bn1 = self.layer4_1_bn1(layer4_1_conv1)
layer4_1_relu = self.layer4_1_relu(layer4_1_bn1)
split_32 = torch.split(layer4_1_relu, 208, 1)
layer4_1_convs_0 = self.layer4_1_convs_0(split_32[0])
layer4_1_bns_0 = self.layer4_1_bns_0(layer4_1_convs_0)
layer4_1_relu_1 = self.layer4_1_relu_1(layer4_1_bns_0)
add_86 = layer4_1_relu_1.__add__(split_32[1])
layer4_1_convs_1 = self.layer4_1_convs_1(add_86)
layer4_1_bns_1 = self.layer4_1_bns_1(layer4_1_convs_1)
layer4_1_relu_2 = self.layer4_1_relu_2(layer4_1_bns_1)
cat_94 = torch.cat([layer4_1_relu_1, layer4_1_relu_2], 1)
add_87 = layer4_1_relu_2.__add__(split_32[2])
layer4_1_convs_2 = self.layer4_1_convs_2(add_87)
layer4_1_bns_2 = self.layer4_1_bns_2(layer4_1_convs_2)
layer4_1_relu_3 = self.layer4_1_relu_3(layer4_1_bns_2)
cat_95 = torch.cat([cat_94, layer4_1_relu_3], 1)
cat_96 = torch.cat([cat_95, split_32[3]], 1)
layer4_1_conv3 = self.layer4_1_conv3(cat_96)
layer4_1_bn3 = self.layer4_1_bn3(layer4_1_conv3)
add_88 = layer4_1_bn3.__iadd__(layer4_0_relu_4)
layer4_1_relu_4 = self.layer4_1_relu_4(add_88)
layer4_2_conv1 = self.layer4_2_conv1(layer4_1_relu_4)
layer4_2_bn1 = self.layer4_2_bn1(layer4_2_conv1)
layer4_2_relu = self.layer4_2_relu(layer4_2_bn1)
split_33 = torch.split(layer4_2_relu, 208, 1)
layer4_2_convs_0 = self.layer4_2_convs_0(split_33[0])
layer4_2_bns_0 = self.layer4_2_bns_0(layer4_2_convs_0)
layer4_2_relu_1 = self.layer4_2_relu_1(layer4_2_bns_0)
add_89 = layer4_2_relu_1.__add__(split_33[1])
layer4_2_convs_1 = self.layer4_2_convs_1(add_89)
layer4_2_bns_1 = self.layer4_2_bns_1(layer4_2_convs_1)
layer4_2_relu_2 = self.layer4_2_relu_2(layer4_2_bns_1)
cat_97 = torch.cat([layer4_2_relu_1, layer4_2_relu_2], 1)
add_90 = layer4_2_relu_2.__add__(split_33[2])
layer4_2_convs_2 = self.layer4_2_convs_2(add_90)
layer4_2_bns_2 = self.layer4_2_bns_2(layer4_2_convs_2)
layer4_2_relu_3 = self.layer4_2_relu_3(layer4_2_bns_2)
cat_98 = torch.cat([cat_97, layer4_2_relu_3], 1)
cat_99 = torch.cat([cat_98, split_33[3]], 1)
layer4_2_conv3 = self.layer4_2_conv3(cat_99)
layer4_2_bn3 = self.layer4_2_bn3(layer4_2_conv3)
add_91 = layer4_2_bn3.__iadd__(layer4_1_relu_4)
layer4_2_relu_4 = self.layer4_2_relu_4(add_91)
avgpool = self.avgpool(layer4_2_relu_4)
size_1 = avgpool.size(0)
view_1 = avgpool.view(size_1, -1)
fc = self.fc(view_1)
return fc
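# smoke test: instantiate the model on CPU in eval mode and run one forward pass on a
# dummy 1x3x224x224 input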
if __name__ == "__main__":
model = res2net101_26w_4s()
model.eval()
model.cpu()
dummy_input_0 = torch.ones((1, 3, 224, 224), dtype=torch.float32)
output = model(dummy_input_0)
print(output)