def _ensure_unsupported_params_unchanged()

in tensorflow_recommenders/layers/embedding/tpu_embedding_layer.py


def _ensure_unsupported_params_unchanged(
    optimizer_params, supported_params, unsupported_params):
  """Helper function to raise exception if an unsupported param was set.

  The unsupported params generally have default values which we cannot
  rely upon to be falsy. Instead of duplicating the default values here
  in a way that is likely to drift out of sync, we construct a second
  copy of the optimizer param object and diff the config fields.
  The parameters "clipnorm" and "clipvalue" are universally unsupported and
  undefined by default, so we check these directly.

  Args:
    optimizer_params: The Keras optimizer param object.
    supported_params: The list of config options on the Keras optimizer that
        we will pass to the constructor.
    unsupported_params: The list of config options that must not be set on
        the Keras optimizer.

  Raises:
    ValueError: if the Keras optimizer has set a config option which the
        tpu_embedding optimizer does not support.
  """
  error_template = (
      "Optimizer parameter %s is unsupported for TPU embeddings. Please "
      "construct a new optimizer for embedding if you wish to use this "
      "setting for model training.")

  for attr in ["clipnorm", "clipvalue"]:
    if getattr(optimizer_params, attr, None) is not None:
      raise ValueError(error_template % attr)

  # Rebuild the optimizer from only the supported constructor arguments;
  # every unsupported field on this reference copy therefore holds its
  # default value, so any difference means the user changed it.
  config = optimizer_params.get_config()
  constructor_args = {p: config[p] for p in supported_params}
  reference = optimizer_params.__class__(**constructor_args)
  reference_config = reference.get_config()
  for p in unsupported_params:
    if config[p] != reference_config[p]:
      raise ValueError(error_template % p)
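
A minimal usage sketch, assuming the standard tf.keras.optimizers.SGD; the
supported/unsupported split below is hypothetical and chosen only to
illustrate the config diff, not the split the layer actually uses:

import tensorflow as tf

# Hypothetical parameter split, for illustration only.
supported = ["learning_rate", "momentum"]
unsupported = ["nesterov"]

plain_sgd = tf.keras.optimizers.SGD(learning_rate=0.1)
_ensure_unsupported_params_unchanged(plain_sgd, supported, unsupported)  # OK

nesterov_sgd = tf.keras.optimizers.SGD(learning_rate=0.1, nesterov=True)
# "nesterov" now differs from its default on the reference copy built from
# the supported parameters alone, so this raises ValueError.
_ensure_unsupported_params_unchanged(nesterov_sgd, supported, unsupported)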