def __init__()

in utils/gluon/utils/densenet.py [0:0]


    def __init__(self, num_init_features, growth_rate, block_config,
                 classes=1000, final_drop=0., ratio=(0., 0., 0., 0.),
                 norm_kwargs=None, name_prefix=None, **kwargs):
        """Construct the DenseNet layer graph.

        Parameters
        ----------
        num_init_features : int
            Channel count of the stem (``conv1``) output; also seeds the
            running channel counter for the dense stages.
        growth_rate : int
            Channels added per dense block (``in_plane += growth_rate``).
        block_config : sequence of int
            Number of dense blocks in each stage (one stage per entry).
        classes : int, default 1000
            Output channels of the final 1x1 convolution classifier.
        final_drop : float, default 0.
            Dropout rate before the classifier; ``0.`` disables dropout.
        ratio : tuple of float, default (0., 0., 0., 0.)
            Per-stage ratio passed to ``self._get_channles`` — presumably the
            low-frequency channel ratio of an octave-convolution variant
            (one entry per stage); TODO confirm against ``_get_channles``.
        norm_kwargs : dict or None
            Extra keyword arguments forwarded to every ``BatchNorm``.
        name_prefix : str or None
            Gluon parameter-name prefix for this network.

        NOTE(review): ``nn`` here appears to be a project-local module, not
        stock ``mxnet.gluon.nn`` — stock ``Conv2D`` takes an int
        ``in_channels`` (not the tuple ``(3, -1)`` below) and stock
        ``MaxPool2D`` has no ``in_channels`` argument at all. Verify against
        the module actually imported by this file.
        """
        super(DenseNet, self).__init__(prefix=name_prefix)
        with self.name_scope():
            # Stem: 7x7 stride-2 conv + BN + ReLU. The tuple in_channels and
            # the _get_channles(...) output suggest split high/low-frequency
            # channel bookkeeping — TODO confirm.
            in_plane = self._get_channles(num_init_features, ratio[0])
            self.conv1 = nn.Conv2D(in_channels=(3, -1), channels=in_plane,
                                     kernel_size=7, padding=3, strides=2,
                                     use_bias=False, prefix='conv1')
            self.bn1 = nn.BatchNorm(in_channels=in_plane, prefix='bn1',
                                     **({} if norm_kwargs is None else norm_kwargs))
            self.relu1 = nn.Activation('relu')
            # ------------------------------------------------------------------
            # 3x3 stride-2 max-pool (custom variant: takes in_channels).
            self.maxpool = nn.MaxPool2D(in_channels=in_plane,
                                     pool_size=3, strides=2, padding=1)
            # ------------------------------------------------------------------
            # Add dense blocks. Sub-blocks are attached via setattr under names
            # 'L<stage>_B<index>' — the forward pass presumably looks them up
            # by this naming scheme, so the scheme must not change.
            in_plane = num_init_features  # reset: track raw (un-split) channel count
            for i, num_blocks in enumerate(block_config):
                stage_index = i + 1
                block_index = 0
                for j in range(num_blocks):
                    # change dimension
                    # Transition layer before every stage except the first:
                    # halves the channel count (classic DenseNet compression
                    # theta = 0.5) and bridges ratio[i-1] -> ratio[i].
                    if j == 0 and i > 0:
                        name = 'L%d_B%d' % (stage_index, block_index)
                        in_plane_t = self._get_channles(in_plane, ratio[i-1])
                        out_plane_t = self._get_channles(int(in_plane/2), ratio[i])
                        setattr(self, name, _Transition(in_channels=in_plane_t,
                                                        out_channels=out_plane_t,
                                                        norm_kwargs=norm_kwargs,
                                                        name_prefix="%s_" % name))
                        block_index += 1
                        in_plane = int(in_plane/2)
                    # main part
                    # Dense block: emits growth_rate new channels which are
                    # (presumably) concatenated onto its input downstream.
                    name = 'L%d_B%d' % (stage_index, block_index)
                    in_plane_t = self._get_channles(in_plane, ratio[i])
                    out_plane_t = self._get_channles(growth_rate, ratio[i])
                    setattr(self, name, _DenseBlock(in_channels=in_plane_t,
                                                    out_channels=out_plane_t,
                                                    norm_kwargs=norm_kwargs,
                                                    name_prefix="%s_" % name))
                    block_index += 1
                    in_plane += growth_rate
            # ------------------------------------------------------------------
            # Tail: final BN + ReLU after the last dense stage (note: built
            # from stock gluon.nn here, unlike the stem layers above).
            self.tail = gluon.nn.HybridSequential()
            self.tail.add(gluon.nn.BatchNorm(prefix='tail-bn_',
                                    **({} if norm_kwargs is None else norm_kwargs)))
            self.tail.add(gluon.nn.Activation('relu'))
            # ------------------------------------------------------------------
            # Head: global average pool, optional dropout (identity lambda when
            # final_drop == 0 — NOTE(review): a plain lambda is not a Block, so
            # it will not appear in collect_params()/export; harmless only if
            # the forward just calls it), then a 1x1 conv acting as the
            # fully-connected classifier, flattened to (N, classes).
            self.avgpool = gluon.nn.GlobalAvgPool2D()
            self.drop = gluon.nn.Dropout(final_drop) if final_drop > 0. else lambda x: (x)
            # NOTE(review): attribute deliberately kept as 'classifer' (sic) —
            # the forward pass elsewhere presumably references this spelling.
            self.classifer = gluon.nn.Conv2D(channels=classes,
                                       kernel_size=1, prefix='classifier_')
            self.flat = gluon.nn.Flatten()