def encode_features()

in tensorflow_ranking/python/feature.py


def encode_features(features,
                    feature_columns,
                    mode=tf.estimator.ModeKeys.TRAIN,
                    scope=None):
  """Returns dense tensors from features using feature columns.

  This function applies the feature column transformations to the 'raw'
  `features`.

  Args:
    features: (dict) mapping feature names to feature values, possibly obtained
      from input_fn.
    feature_columns: (list) A list of feature columns.
    mode: (`estimator.ModeKeys`) Specifies whether this is training, evaluation,
      or inference. See `ModeKeys`.
    scope: (str) variable scope for the per column input layers.

  Returns:
    (dict) A mapping from columns to dense tensors.
  """
  # `scope` is kept in the signature only for backward compatibility and is
  # unused.
  del scope
  trainable = (mode == tf.estimator.ModeKeys.TRAIN)
  cols_to_tensors = {}

  # TODO: Ensure only v2 Feature Columns are used.
  if (hasattr(feature_column_lib, "is_feature_column_v2") and
      feature_column_lib.is_feature_column_v2(feature_columns)):
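    # V2 feature columns: separate regular columns, which DenseFeatures encodes
    # in a single pass, from sequence columns, which require SequenceFeatures.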
    dense_feature_columns = [
        col for col in feature_columns if not _is_sequence_column_v2(col)
    ]
    sequence_feature_columns = [
        col for col in feature_columns if _is_sequence_column_v2(col)
    ]

    if dense_feature_columns:
      dense_layer = tf.compat.v1.keras.layers.DenseFeatures(
          feature_columns=dense_feature_columns,
          name="encoding_layer",
          trainable=trainable)
      dense_layer(features, cols_to_output_tensors=cols_to_tensors)

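    # Encode each sequence column separately; SequenceFeatures returns the
    # dense sequence tensor plus per-example sequence lengths, and only the
    # tensor is kept.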
    for col in sequence_feature_columns:
      sequence_feature_layer = tf.keras.experimental.SequenceFeatures(col)
      sequence_input, _ = sequence_feature_layer(features)
      cols_to_tensors[col] = sequence_input
  else:
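    # V1 feature columns fall back to the v1 input_layer, which writes the
    # per-column tensors into cols_to_tensors in place.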
    tf.compat.v1.feature_column.input_layer(
        features=features,
        feature_columns=feature_columns,
        trainable=trainable,
        cols_to_output_tensors=cols_to_tensors)

  return cols_to_tensors
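
A minimal usage sketch follows; the feature names and values are illustrative
assumptions rather than anything from the library's documentation. Numeric
columns created via `tf.feature_column.numeric_column` are v2 columns, so the
DenseFeatures branch above is taken.

import tensorflow as tf

# Hypothetical context and example feature columns.
context_col = tf.feature_column.numeric_column("query_length")
example_col = tf.feature_column.numeric_column("doc_score")

# Hypothetical raw features, as an input_fn might produce them.
features = {
    "query_length": tf.constant([[3.0], [5.0]]),
    "doc_score": tf.constant([[0.2], [0.7]]),
}

cols_to_tensors = encode_features(
    features, [context_col, example_col], mode=tf.estimator.ModeKeys.EVAL)
# The result maps each column object to its dense tensor, e.g.
# cols_to_tensors[context_col] has shape [2, 1].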