# _clone_and_prepare_features
# Extracted from: tensorflow_recommenders/layers/embedding/tpu_embedding_layer.py

def _clone_and_prepare_features(feature_config):
  """Prepares a nested structure of FeatureConfig objects for mid level api.

  Clones the feature_config structure and its contained
  `tf.tpu.experimental.embedding.TableConfig` objects. This is done so that
  TPUEmbedding layer doesn't touch the user's original configuration.

  Args:
    feature_config: A nested structure of
      `tf.tpu.experimental.embedding.FeatureConfig` objects.

  Returns:
    A nested structure of
    `tf.tpu.experimental.embedding.FeatureConfig` objects and list of tuples
    mapping user `tf.tpu.experimental.embedding.TableConfig` objects to the
    internal ones.
  """
  output_objects = []

  # Maps each user-supplied TableConfig to its clone so that features which
  # share a table in the input keep sharing a single cloned table.
  table_configs = {}

  for config in tf.nest.flatten(feature_config):
    # There should be a one-to-one mapping between new TableConfig objects and
    # old ones (as each TableConfig can be thought of as a table). Use an
    # explicit membership test rather than `dict.get(key, TableConfig(...))`:
    # `get` evaluates its default eagerly, which would construct (and discard)
    # a throwaway TableConfig for every feature that reuses a seen table.
    if config.table not in table_configs:
      table_configs[config.table] = tf.tpu.experimental.embedding.TableConfig(
          vocabulary_size=config.table.vocabulary_size,
          dim=config.table.dim,
          initializer=config.table.initializer,
          optimizer=config.table.optimizer,
          combiner=config.table.combiner,
          name=config.table.name)

    output_objects.append(
        tf.tpu.experimental.embedding.FeatureConfig(
            table=table_configs[config.table],
            max_sequence_length=config.max_sequence_length,
            validate_weights_and_indices=config.validate_weights_and_indices,
            name=config.name))

  # Fix up the optimizers on the clones only; the user's original TableConfig
  # objects are left untouched.
  for new_table in table_configs.values():
    if new_table.optimizer is not None:
      new_table.optimizer = _normalize_and_prepare_optimizer(
          new_table.optimizer)

  return (tf.nest.pack_sequence_as(feature_config, output_objects),
          list(table_configs.items()))