def __init__()

in utils/gluon/utils/resnetv1.py [0:0]
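
Builds the _ResNetV1 backbone: an input stem (a single 7x7 convolution, or three 3x3 convolutions when deep_stem=True), an optional replacement for the stem max-pool (replace_maxpool in {'3x3', 'bottleneck-131'}), four residual stages assembled via _make_layer, and a classification head of global average pooling, optional dropout, and a 1x1 convolution. multiplier scales all channel widths, and use_global_stats freezes batch-norm statistics.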


    def __init__(self, block, layers, groups=1, multiplier=1.,
                 ratio=(0., 0., 0., 0.),
                 num_out=(256, 512, 1024, 2048),
                 num_mid=( 64, 128,  256,  512),
                 classes=1000, use_se=False, down_pos=0, replace_maxpool=None,
                 norm_kwargs=None, last_gamma=False, deep_stem=False,
                 final_drop=0., use_global_stats=False, extra_bn=False,
                 name_prefix='', **kwargs):
        super(_ResNetV1, self).__init__(prefix=name_prefix)
        norm_kwargs = dict(norm_kwargs) if norm_kwargs is not None else {}  # copy so the caller's dict is not mutated below
        if use_global_stats:
            norm_kwargs['use_global_stats'] = True
        # initialize residual networks
        k = multiplier
        self.use_se = use_se
        self.extra_bn = extra_bn
        self.groups = groups
        self.down_pos = down_pos
        self.last_gamma = last_gamma
        self.norm_kwargs = norm_kwargs

        with self.name_scope():
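            # stem: a single 7x7/stride-2 convolution by default, or three stacked 3x3 convolutions when deep_stem=True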
            self.conv1 = gluon.nn.HybridSequential()
            if not deep_stem:
                self.conv1.add(gluon.nn.Conv2D(channels=int(k*64), kernel_size=7, padding=3, strides=2,
                                         use_bias=False, prefix='conv1_'))
                self.conv1.add(gluon.nn.BatchNorm(prefix='bn1_', **norm_kwargs))
                self.conv1.add(gluon.nn.Activation('relu'))
            else:
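                # default deep stem: 32 channels and stride 2 in the first conv; when the max-pool is replaced, start with 8 channels and defer the stride-2 to the second conv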
                n1, s1, s2 = (32, 2, 1) if replace_maxpool is None else (8, 1, 2)
                self.conv1.add(gluon.nn.Conv2D(channels=int(k*n1), kernel_size=3, padding=1, strides=s1,
                                         use_bias=False, prefix='stem_conv1_'))
                self.conv1.add(gluon.nn.BatchNorm(prefix='stem_bn1_', **norm_kwargs))
                self.conv1.add(gluon.nn.Activation('relu'))
                self.conv1.add(gluon.nn.Conv2D(channels=int(k*32), kernel_size=3, padding=1, strides=s2,
                                         use_bias=False, prefix='stem_conv2_'))
                self.conv1.add(gluon.nn.BatchNorm(prefix='stem_bn2_', **norm_kwargs))
                self.conv1.add(gluon.nn.Activation('relu'))
                self.conv1.add(gluon.nn.Conv2D(channels=int(k*64), kernel_size=3, padding=1, strides=1,
                                         use_bias=False, prefix='stem_conv3_'))
                self.conv1.add(gluon.nn.BatchNorm(prefix='stem_bn3_', **norm_kwargs))
                self.conv1.add(gluon.nn.Activation('relu'))
            # ------------------------------------------------------------------
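            # downsampling after the stem: a 3x3 max-pool by default, otherwise a strided 3x3 conv or a 1-3-1 bottleneck stem, as selected by replace_maxpool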
            if replace_maxpool is None:
                self.maxpool = gluon.nn.MaxPool2D(pool_size=3, strides=2, padding=1)
            elif replace_maxpool == '3x3':
                self.conv2 = gluon.nn.HybridSequential()
                self.conv2.add(gluon.nn.Conv2D(channels=int(k*64), kernel_size=3, padding=1, strides=2,
                                         use_bias=False, prefix='conv2_'))
                self.conv2.add(gluon.nn.BatchNorm(prefix='bn2_', **norm_kwargs))
                self.conv2.add(gluon.nn.Activation('relu'))
            elif replace_maxpool == 'bottleneck-131':
                self.conv2 = _bL_Stem(in_planes=int(k*64), mid_planes=int(k*32), out_planes=int(k*64),
                                         strides=2, norm_kwargs=norm_kwargs, last_gamma=last_gamma,
                                         kernel_sizes=(1,3,1), name_prefix='stem2_')
            else:
                raise NotImplementedError("replace_maxpool = {} is not implemented".format(replace_maxpool))
            # ------------------------------------------------------------------
            # residual stages: four groups of residual units built by _make_layer
            self.inplanes = (int(k*64), -1)  # input planes, in the format consumed by _make_layer
            self._make_layer(1, block, layers[0], int(k*num_out[0]), num_mid[0], ratio[0])
            self._make_layer(2, block, layers[1], int(k*num_out[1]), num_mid[1], ratio[1], strides=2)
            self._make_layer(3, block, layers[2], int(k*num_out[2]), num_mid[2], ratio[2], strides=2)
            self._make_layer(4, block, layers[3], int(k*num_out[3]), num_mid[3], ratio[3], strides=2)
            # ------------------------------------------------------------------
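            # classification head: global average pooling, optional dropout, and a 1x1 convolution as the classifier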
            self.avgpool = gluon.nn.GlobalAvgPool2D()
            self.drop = gluon.nn.Dropout(final_drop) if final_drop > 0. else (lambda x: x)  # identity when no dropout
            self.classifer = gluon.nn.Conv2D(in_channels=int(k*num_out[3]), channels=classes,
                                       kernel_size=1, prefix='classifier_')
            self.flat = gluon.nn.Flatten()
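
For reference, a minimal usage sketch. It assumes MXNet Gluon 1.x and a bottleneck block class defined alongside this one (BottleneckV1 below is a hypothetical stand-in); the forward pass relies on the class's hybrid_forward, which is not shown in this excerpt.

    import mxnet as mx

    # BottleneckV1 is a placeholder for the residual block class in this file
    net = _ResNetV1(BottleneckV1, layers=(3, 4, 6, 3),   # ResNet-50-style stage depths
                    classes=1000, deep_stem=True,        # three 3x3 stem convolutions
                    replace_maxpool='3x3',               # strided conv instead of max-pool
                    final_drop=0.2, name_prefix='resnet50_')
    net.initialize(mx.init.Xavier())
    x = mx.nd.random.uniform(shape=(1, 3, 224, 224))     # one NCHW image
    y = net(x)                                           # expected shape: (1, 1000)

Note that the head uses a 1x1 convolution followed by Flatten rather than a Dense layer, which keeps the network fully convolutional up to the final reshape.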