tensorflow_model_optimization/python/core/quantization/keras/default_8bit/default_8bit_quantize_registry.py [470:490]:
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
          '`keras.layers.Activation`.')

  def get_weights_and_quantizers(self, layer):
    """Returns the (weight, quantizer) pairs for `layer`.

    Always an empty list: after validating that `layer` is an activation
    layer, there are no weights to quantize.
    """
    self._assert_activation_layer(layer)
    no_weights = []
    return no_weights

  def get_activations_and_quantizers(self, layer):
    """Returns the (activation, quantizer) pairs for `layer`.

    Always an empty list: after validating that `layer` is an activation
    layer, no in-layer activations are reported here.
    """
    self._assert_activation_layer(layer)
    no_activations = []
    return no_activations

  def set_quantize_weights(self, layer, quantize_weights):
    """No-op aside from validating that `layer` is an activation layer.

    `quantize_weights` is ignored — this config exposes no weights
    (see `get_weights_and_quantizers`), so there is nothing to set.
    """
    self._assert_activation_layer(layer)

  def set_quantize_activations(self, layer, quantize_activations):
    """No-op aside from validating that `layer` is an activation layer.

    `quantize_activations` is ignored — this config reports no in-layer
    activations (see `get_activations_and_quantizers`).
    """
    self._assert_activation_layer(layer)

  def get_output_quantizers(self, layer):
    self._assert_activation_layer(layer)

    if not hasattr(layer.activation, '__name__'):
      raise ValueError('Activation {} not supported by '
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -



tensorflow_model_optimization/python/core/quantization/keras/experimental/default_n_bit/default_n_bit_quantize_registry.py [515:535]:
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
          '`keras.layers.Activation`.')

  def get_weights_and_quantizers(self, layer):
    """Returns the (weight, quantizer) pairs for `layer`.

    Always an empty list: after validating that `layer` is an activation
    layer, there are no weights to quantize.
    """
    self._assert_activation_layer(layer)
    no_weights = []
    return no_weights

  def get_activations_and_quantizers(self, layer):
    """Returns the (activation, quantizer) pairs for `layer`.

    Always an empty list: after validating that `layer` is an activation
    layer, no in-layer activations are reported here.
    """
    self._assert_activation_layer(layer)
    no_activations = []
    return no_activations

  def set_quantize_weights(self, layer, quantize_weights):
    """No-op aside from validating that `layer` is an activation layer.

    `quantize_weights` is ignored — this config exposes no weights
    (see `get_weights_and_quantizers`), so there is nothing to set.
    """
    self._assert_activation_layer(layer)

  def set_quantize_activations(self, layer, quantize_activations):
    """No-op aside from validating that `layer` is an activation layer.

    `quantize_activations` is ignored — this config reports no in-layer
    activations (see `get_activations_and_quantizers`).
    """
    self._assert_activation_layer(layer)

  def get_output_quantizers(self, layer):
    self._assert_activation_layer(layer)

    if not hasattr(layer.activation, '__name__'):
      raise ValueError('Activation {} not supported by '
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -



