in tensorflow/sagemakercv/detection/backbones/resnet.py [0:0]
def __init__(self,
             filters,
             trainable,
             finetune_bn,
             strides,
             use_projection=False,
             data_format='channels_last',
             norm_type='batchnorm'):
"""Standard building block for residual networks with BN after convolutions.
Args:
filters: `int` number of filters for the first two convolutions. Note that
the third and final convolution will use 4 times as many filters.
finetune_bn: `bool` for whether the model is in training.
strides: `int` block stride. If greater than 1, this block will ultimately downsample the input.
use_projection: `bool` for whether this block should use a projection
shortcut (versus the default identity shortcut). This is usually `True`
for the first block of a block group, which may change the number of
filters and the resolution.
data_format: `str` either "channels_first" for `[batch, channels, height, width]`
or "channels_last for `[batch, height, width, channels]`.
"""
super(ResidualBlock, self).__init__(trainable=trainable)
self._finetune_bn = finetune_bn
self.norm_type = norm_type
self._local_layers = dict()
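    # Projection shortcut: a 1x1 convolution plus a norm layer (no ReLU) that
    # reshapes the identity path when the block changes resolution or filters.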
if use_projection:
self._local_layers["projection"] = dict()
self._local_layers["projection"]["conv2d"] = Conv2dFixedPadding(
filters=filters,
kernel_size=1,
strides=strides,
data_format=data_format,
trainable=trainable
)
if norm_type == 'batchnorm':
self._local_layers["projection"]["batchnorm"] = BNReLULayer(
trainable=finetune_bn and trainable,
relu=False,
init_zero=False,
data_format=data_format,
)
elif norm_type == 'groupnorm':
self._local_layers["projection"]["groupnorm"] = GNReLULayer(
trainable=True,
relu=False,
init_zero=False,
data_format=data_format,
)
else:
            raise NotImplementedError(
                "Unsupported norm_type: {}".format(norm_type))
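    # Main residual branch: two 3x3 convolutions. The first carries the block
    # stride (and thus any downsampling); the second is always stride 1.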
self._local_layers["conv2d_1"] = Conv2dFixedPadding(
trainable=trainable,
filters=filters,
kernel_size=3,
strides=strides,
data_format=data_format,
)
self._local_layers["conv2d_2"] = Conv2dFixedPadding(
trainable=trainable,
filters=filters,
kernel_size=3,
strides=1,
data_format=data_format,
)
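    # One norm layer follows each convolution. The second norm skips the ReLU
    # (the final activation comes after the shortcut addition) and uses
    # init_zero=True, so the residual branch starts at zero and each block
    # initially behaves like an identity mapping.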
if norm_type == 'batchnorm':
self._local_layers["batchnorm_1"] = BNReLULayer(
trainable=finetune_bn and trainable,
relu=True,
init_zero=False,
data_format=data_format,
)
self._local_layers["batchnorm_2"] = BNReLULayer(
trainable=finetune_bn and trainable,
relu=False,
init_zero=True,
data_format=data_format,
)
elif norm_type == 'groupnorm':
self._local_layers["groupnorm_1"] = GNReLULayer(
trainable=True,
relu=True,
init_zero=False,
data_format=data_format,
)
self._local_layers["groupnorm_2"] = GNReLULayer(
trainable=True,
relu=False,
init_zero=True,
data_format=data_format,
)
self._local_layers["activation"] = tf.keras.layers.ReLU()