def call()

in tensorflow_recommenders/layers/embedding/tpu_embedding_layer.py


  def call(self, features, weights=None, serving_config=None):
    """Look up features in the embedding tables and combine using weights.

    Args:
      features: a nested structure of `Tensor`s, `SparseTensor`s or
        `RaggedTensor`s with the same structure as `feature_config`. These
        tensors are used as ids to look up rows in the embedding tables using the
        config as specified in the corresponding entry of `feature_config`. You
        can mix `Tensor`s and `SparseTensor`s, or `Tensor`s and `RaggedTensor`s,
        but not `SparseTensor`s and `RaggedTensor`s.
      weights: None, or a nested structure of `Tensor`s, `SparseTensor`s or
        `RaggedTensor`s matching features. These are the weights used when
        combining the looked up rows for a given feature and example. If None,
        weights of 1 will be used.
      serving_config: A nested structure of
        `tf.tpu.experimental.embedding.FeatureConfig` objects. If not None, the
        layer performs a CPU-based lookup using serving_config and the current
        set of embedding tables.

    Returns:
      The combined embedding activations for the input ids passed in via
      features.

    Raises:
      RuntimeError: If the layer was created under a non-TPU strategy but is
        called under a `TPUStrategy`, or was created under a `TPUStrategy` but
        is called under a different strategy (i.e. outside `strategy.run`).
    """
    if serving_config is not None:
      # The TableConfig objects in the serving_config should match the ones
      # passed to the layer when it was created. Since we cloned those, we
      # need to update to the new TableConfig objects. Use the stored mapping
      # to do this.
      serving_config = _update_table_configs(serving_config,
                                             self._table_config_map)
      return tf.tpu.experimental.embedding.serving_embedding_lookup(
          features, weights, self._tpu_embedding.embedding_tables,
          serving_config)

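    # Enforce that the creation and calling strategies agree: a layer created
    # under a TPUStrategy must be called inside strategy.run, and a layer
    # created outside one must not be called under a TPUStrategy.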
    if not self._using_tpu and _is_tpu_strategy(tf.distribute.get_strategy()):
      raise RuntimeError(f"Layer is created under strategy {self._strategy} "
                         "but is being called under a TPUStrategy. Please "
                         "create the layer under a TPUStrategy if you wish to "
                         "run the layer on TPU.")
    if self._using_tpu and not _is_tpu_strategy(tf.distribute.get_strategy()):
      raise RuntimeError(f"Layer is created under strategy {self._strategy} "
                         "but is being called under strategy "
                         f"{tf.distribute.get_strategy()}. Please use "
                         "strategy.run when calling this layer.")

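    # Strategies are consistent here: either both creation and call are under
    # a TPUStrategy (use the TPU embedding lookup) or neither is (fall back to
    # a CPU lookup against the same embedding tables).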
    if self._using_tpu and _is_tpu_strategy(tf.distribute.get_strategy()):
      return self._tpu_embedding_lookup(features, weights)
    else:
      return tf.tpu.experimental.embedding.serving_embedding_lookup(
          features, weights, self._tpu_embedding.embedding_tables,
          self._feature_config)
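
For orientation, below is a minimal usage sketch, not taken from the library's
docs; the table and feature names and the single-host TPU setup are
assumptions. It shows the two paths this method dispatches between: the
on-device lookup under a `TPUStrategy`, and the CPU serving lookup enabled by
`serving_config`.

import tensorflow as tf
import tensorflow_recommenders as tfrs

# Hypothetical configuration; table/feature names are illustrative only.
video_table = tf.tpu.experimental.embedding.TableConfig(
    vocabulary_size=1024, dim=16, name="video")
feature_config = {
    "watched": tf.tpu.experimental.embedding.FeatureConfig(table=video_table),
}

resolver = tf.distribute.cluster_resolver.TPUClusterResolver(tpu="")
tf.tpu.experimental.initialize_tpu_system(resolver)
strategy = tf.distribute.TPUStrategy(resolver)

with strategy.scope():
  embedding_layer = tfrs.layers.embedding.TPUEmbedding(
      feature_config=feature_config,
      optimizer=tf.tpu.experimental.embedding.SGD(learning_rate=0.1))

@tf.function
def lookup_on_tpu(features):
  # TPU path: the layer must be invoked via strategy.run, matching the
  # strategy it was created under.
  return strategy.run(embedding_layer, args=(features,))

# CPU serving path: once the layer has been built (e.g. after training),
# passing serving_config performs the lookup against the same embedding
# tables without requiring a TPUStrategy.
serving_ids = {"watched": tf.sparse.from_dense(tf.constant([[3, 7, 0]]))}
activations = embedding_layer(serving_ids, serving_config=feature_config)

The serving path is what allows a model trained with the TPU embedding
mid-level API to be evaluated or served on CPU against the same tables,
which is why it bypasses the strategy-mismatch checks entirely.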