tensorflow_model_optimization/python/core/quantization/keras/layers/conv_batchnorm_test_utils.py [63:84]:
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
  @classmethod
  def get_nonfolded_batchnorm_model(cls,
                                    post_bn_activation=None,
                                    model_type='sequential',
                                    random_init=False,
                                    squeeze_type=False,
                                    normalization_type='BatchNormalization'):
    """Return nonfolded Conv2D + BN + optional activation model."""
    if normalization_type == 'BatchNormalization':
      normalization = keras.layers.BatchNormalization
    elif normalization_type == 'SyncBatchNormalization':
      normalization = keras.layers.experimental.SyncBatchNormalization

    if squeeze_type == 'sepconv1d_squeeze':
      squeeze_layer = tf.keras.layers.Lambda(
          lambda x: tf.squeeze(x, [1]), name='sepconv1d_squeeze_1')
    else:
      squeeze_layer = None

    if model_type == 'sequential':
      layers = []
      layers.append(
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
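
For context, a minimal sketch of how a builder like this is typically exercised by the batch-norm folding tests follows. The enclosing class name Conv2DModel is an assumption (the excerpt shows only the classmethod body), and the assertions simply illustrate that the non-folded variant keeps the convolution and normalization as separate layers.

import tensorflow as tf

from tensorflow_model_optimization.python.core.quantization.keras.layers import (
    conv_batchnorm_test_utils)

# Assumption: the classmethod above is defined on a builder class such as
# Conv2DModel; only the method body is visible in the excerpt above.
model = conv_batchnorm_test_utils.Conv2DModel.get_nonfolded_batchnorm_model(
    model_type='sequential',
    normalization_type='BatchNormalization')

# The non-folded variant keeps Conv2D and BatchNormalization as separate
# layers, which is what the folding tests compare against.
assert any(isinstance(l, tf.keras.layers.Conv2D) for l in model.layers)
assert any(
    isinstance(l, tf.keras.layers.BatchNormalization) for l in model.layers)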



tensorflow_model_optimization/python/core/quantization/keras/layers/conv_batchnorm_test_utils.py [125:145]:
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
  @classmethod
  def get_nonfolded_batchnorm_model(cls,
                                    post_bn_activation=None,
                                    model_type='sequential',
                                    random_init=False,
                                    squeeze_type=False,
                                    normalization_type='BatchNormalization'):
    if normalization_type == 'BatchNormalization':
      normalization = keras.layers.BatchNormalization
    elif normalization_type == 'SyncBatchNormalization':
      normalization = keras.layers.experimental.SyncBatchNormalization

    if squeeze_type == 'sepconv1d_squeeze':
      squeeze_layer = tf.keras.layers.Lambda(
          lambda x: tf.squeeze(x, [1]), name='sepconv1d_squeeze_1')
    else:
      squeeze_layer = None

    if model_type == 'sequential':
      layers = []
      layers.append(
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
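
The block above repeats the normalization- and squeeze-selection branches of the first excerpt verbatim. A minimal deduplication sketch follows; the module-level helpers _get_normalization_layer and _get_squeeze_layer are hypothetical names, not part of the file, shown only to indicate how the shared branches could live in one place and be called from both classmethods.

import tensorflow as tf
from tensorflow import keras


def _get_normalization_layer(normalization_type):
  # Maps the normalization_type string used above to a Keras layer class.
  # Unlike the excerpt, this sketch raises explicitly on unknown values
  # instead of leaving the variable unbound.
  if normalization_type == 'BatchNormalization':
    return keras.layers.BatchNormalization
  elif normalization_type == 'SyncBatchNormalization':
    return keras.layers.experimental.SyncBatchNormalization
  raise ValueError('Unknown normalization_type: {}'.format(normalization_type))


def _get_squeeze_layer(squeeze_type):
  # Returns the optional Lambda squeeze layer, or None when not requested.
  if squeeze_type == 'sepconv1d_squeeze':
    return tf.keras.layers.Lambda(
        lambda x: tf.squeeze(x, [1]), name='sepconv1d_squeeze_1')
  return None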



