def get_config()

in tensorflow_hub/keras_layer.py


  def get_config(self):
    """Returns a serializable dict of keras layer configuration parameters."""
    config = super().get_config()
    if not isinstance(self._handle, str):
      # Raise this exception type so that tf.saved_model.save() falls back to
      # not using the config, instead of crashing.
      # TODO(b/134528831): Reconsider the usability implications.
      raise NotImplementedError(
          "Can only generate a valid config for `hub.KerasLayer(handle, ...)`"
          "that uses a string `handle`.\n\n"
          "Got `type(handle)`: {}".format(type(self._handle)))
    config["handle"] = self._handle

    if hasattr(self, "_output_shape"):
      output_shape = _convert_nest_from_shapes(self._output_shape)
      try:
        json.dumps(output_shape)
      except TypeError as e:
        raise ValueError(
            "hub.KerasLayer(..., output_shape=) is not JSON-serializable.\n"
            "Got value: {}".format(output_shape)) from e
      config["output_shape"] = output_shape

    if self._arguments:
      # Raise clear errors for non-serializable arguments.
      for key, value in self._arguments.items():
        try:
          json.dumps(value)
        except TypeError as e:
          raise ValueError(
              "`hub.KerasLayer(..., arguments)` contains non-JSON-serializable "
              "values in key: {}".format(key)) from e
      config["arguments"] = self._arguments

    if self._signature:
      config["signature"] = self._signature
    if self._output_key:
      config["output_key"] = self._output_key
    if self._signature_outputs_as_dict:
      config["signature_outputs_as_dict"] = self._signature_outputs_as_dict

    # self._load_options is not stored in the config. Instead, the load
    # options passed when this layer is reloaded from its config are applied
    # to its own loading as well. That is because the only load option
    # available at this time (July 2020) is `experimental_io_device`, which
    # relates to the loading environment, not to the interpretation of the
    # loaded SavedModel.

    return config
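
For reference, here is a minimal usage sketch (not part of the source) of how this config round-trips through the standard Keras serialization path. The model handle below is only an illustrative example; any string handle works.

  import tensorflow_hub as hub

  # Build a layer from a string handle; get_config() only supports string handles.
  layer = hub.KerasLayer(
      "https://tfhub.dev/google/nnlm-en-dim50/2",  # example handle
      trainable=False)

  config = layer.get_config()  # includes "handle" plus base Layer keys like "trainable"
  restored = hub.KerasLayer.from_config(config)  # from_config() is inherited from tf.keras.layers.Layer

A layer constructed from an already-loaded SavedModel object (a non-string handle) would instead raise the NotImplementedError shown above when get_config() is called.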