utils/gluon/utils/resnetv1.py [234:242]:
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
                                         use_bias=False, prefix='stem_conv2_'))
                self.conv1.add(gluon.nn.BatchNorm(prefix='stem_bn2_',
                                         **({} if norm_kwargs is None else norm_kwargs)))
                self.conv1.add(gluon.nn.Activation('relu'))
                self.conv1.add(gluon.nn.Conv2D(channels=int(k*64), kernel_size=3, padding=1, strides=1,
                                         use_bias=False, prefix='stem_conv3_'))
                self.conv1.add(gluon.nn.BatchNorm(prefix='stem_bn3_',
                                         **({} if norm_kwargs is None else norm_kwargs)))
                self.conv1.add(gluon.nn.Activation('relu'))
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
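For reference, the duplicated block above is the tail of a ResNet "deep stem": the usual 7x7/2 input convolution is replaced by three 3x3 convolutions, each followed by BatchNorm and ReLU. Below is a minimal, self-contained sketch of the full stem, not a copy of either file: it assumes MXNet 1.x Gluon, a width multiplier k (suggested by int(k*64)), and int(k*32) channels with stride 2 for the first convolution, since the lines preceding the excerpt are not shown. The helper name make_deep_stem is illustrative only.

    from mxnet import gluon

    def make_deep_stem(k=1.0, norm_kwargs=None):
        # Hypothetical helper (not in the original files): rebuilds the
        # three-conv deep stem whose tail appears in the excerpt above.
        nk = {} if norm_kwargs is None else norm_kwargs
        stem = gluon.nn.HybridSequential()
        # First 3x3 conv at stride 2 takes the place of the classic 7x7/2 stem conv (assumed).
        stem.add(gluon.nn.Conv2D(channels=int(k*32), kernel_size=3, padding=1, strides=2,
                                 use_bias=False, prefix='stem_conv1_'))
        stem.add(gluon.nn.BatchNorm(prefix='stem_bn1_', **nk))
        stem.add(gluon.nn.Activation('relu'))
        # Second 3x3 conv at stride 1 (the excerpt starts inside this call).
        stem.add(gluon.nn.Conv2D(channels=int(k*32), kernel_size=3, padding=1, strides=1,
                                 use_bias=False, prefix='stem_conv2_'))
        stem.add(gluon.nn.BatchNorm(prefix='stem_bn2_', **nk))
        stem.add(gluon.nn.Activation('relu'))
        # Third 3x3 conv widens the stem to k*64 channels before the residual stages.
        stem.add(gluon.nn.Conv2D(channels=int(k*64), kernel_size=3, padding=1, strides=1,
                                 use_bias=False, prefix='stem_conv3_'))
        stem.add(gluon.nn.BatchNorm(prefix='stem_bn3_', **nk))
        stem.add(gluon.nn.Activation('relu'))
        return stem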



utils/gluon/utils/resnetv2.py [152:160]:
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
                                         use_bias=False, prefix='stem_conv2_'))
                self.conv1.add(gluon.nn.BatchNorm(prefix='stem_bn2_',
                                         **({} if norm_kwargs is None else norm_kwargs)))
                self.conv1.add(gluon.nn.Activation('relu'))
                self.conv1.add(gluon.nn.Conv2D(channels=int(k*64), kernel_size=3, padding=1, strides=1,
                                         use_bias=False, prefix='stem_conv3_'))
                self.conv1.add(gluon.nn.BatchNorm(prefix='stem_bn3_',
                                         **({} if norm_kwargs is None else norm_kwargs)))
                self.conv1.add(gluon.nn.Activation('relu'))
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
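The block above is byte-for-byte identical to the one reported in resnetv1.py, which makes the stem a natural candidate for a shared helper. A quick smoke test of the make_deep_stem sketch given after the first excerpt (illustrative only; assumes MXNet 1.x and its NDArray API):

    import mxnet as mx

    stem = make_deep_stem(k=1.0)        # hypothetical helper from the sketch above
    stem.initialize()
    x = mx.nd.zeros((1, 3, 224, 224))   # dummy NCHW input
    y = stem(x)
    print(y.shape)                      # (1, 64, 112, 112): the single stride-2 conv halves H and W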



