def __init__()

in utils/gluon/utils/mobilenetv1.py [0:0]


    def __init__(self, multiplier=1.0, classes=1000,
                 ratio=0.,
                 norm_kwargs=None, final_drop=0.,
                 name_prefix=None, **kwargs):
        super(_MobileNetV1, self).__init__(prefix=name_prefix)
        # reference:
        # - Howard, Andrew G., et al.
        #   "Mobilenets: Efficient convolutional neural networks for mobile vision applications."
        #   arXiv preprint arXiv:1704.04861 (2017).
        dw_channels = [int(x * multiplier) for x in
                       [32, 64] + [128] * 2 + [256] * 2 + [512] * 6 + [1024]]
        channels = [int(x * multiplier) for x in
                    [64] + [128] * 2 + [256] * 2 + [512] * 6 + [1024] * 2]
        strides = [1, 2] * 3 + [1] * 5 + [2, 1]
        dw_ratios = [0.] + [ratio] * 10 + [0.] * 2
        pw_ratios = [ratio] * 10 + [0.] * 3
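        # each of the five lists above has 13 entries, one per depthwise-
        # separable block; the depthwise-ratio list is the pointwise-ratio
        # list shifted right by one block (zero-padded at the ends), so the
        # output ratio of each block matches the input ratio of the next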

        with self.name_scope():
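            # stem: 3x3 stride-2 convolution followed by batch norm and ReLU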
            self.conv1 = gluon.nn.HybridSequential()
            self.conv1.add(gluon.nn.Conv2D(channels=int(32 * multiplier),
                            kernel_size=3, padding=1, strides=2, use_bias=False,
                            prefix='conv1_'))
            self.conv1.add(gluon.nn.BatchNorm(prefix='bn1_',
                            **({} if norm_kwargs is None else norm_kwargs)))
            self.conv1.add(gluon.nn.Activation('relu'))

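            # 13 depthwise-separable blocks; a new stage starts at each
            # stride-2 block, and each block is registered as an attribute
            # named 'L<stage>_B<index>'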
            stage_index, i = 1, 0
            for dwc, pwc, s, dr, pr in zip(dw_channels, channels, strides, dw_ratios, pw_ratios):
                stage_index += 1 if s > 1 else 0
                i = 0 if s > 1 else (i + 1)
                # adjust the depthwise / pointwise channel counts for this
                # block according to its ratios (helper defined on the class)
                dwc = self._get_channles(dwc, dr)
                pwc = self._get_channles(pwc, pr)
                name = 'L%d_B%d' % (stage_index, i)
                setattr(self, name, _DWSConv(in_channels=dwc,
                                             channels=pwc, stride=s,
                                             norm_kwargs=norm_kwargs,
                                             name_prefix="%s_" % name))

            self.drop = gluon.nn.Dropout(final_drop) if final_drop > 0. else (lambda x: x)
            self.classifer = gluon.nn.Conv2D(in_channels=channels[-1], channels=classes,
                                             kernel_size=1, prefix='classifier_')
            self.flat = gluon.nn.Flatten()
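
For reference, the per-block configuration assembled above can be checked outside of Gluon. The sketch below is illustrative only: it re-evaluates the same channel and stride lists for the default multiplier=1.0 and prints the name, input channels, output channels, and stride of each of the 13 depthwise-separable blocks (the ratio handling via _get_channles is omitted here).

    # standalone sketch; plain Python, no MXNet required
    multiplier = 1.0
    dw_channels = [int(x * multiplier) for x in
                   [32, 64] + [128] * 2 + [256] * 2 + [512] * 6 + [1024]]
    channels = [int(x * multiplier) for x in
                [64] + [128] * 2 + [256] * 2 + [512] * 6 + [1024] * 2]
    strides = [1, 2] * 3 + [1] * 5 + [2, 1]

    # reproduce the naming scheme used in the block loop above
    stage_index, i = 1, 0
    for dwc, pwc, s in zip(dw_channels, channels, strides):
        stage_index += 1 if s > 1 else 0
        i = 0 if s > 1 else (i + 1)
        print('L%d_B%d: in=%4d  out=%4d  stride=%d' % (stage_index, i, dwc, pwc, s))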