in tensorflow_ranking/python/estimator.py [0:0]
def __init__(self,
context_feature_columns,
example_feature_columns,
scoring_function,
transform_function=None,
optimizer=None,
loss_reduction=None,
hparams=None):
"""Constructor.
Args:
context_feature_columns: (dict) Context (a.k.a. query) feature columns.
example_feature_columns: (dict) Example (a.k.a. document) feature columns.
scoring_function: (function) A user-provided scoring function with the
following signature:
* Args:
`context_features`: (dict) A dict of Tensors with the shape
[batch_size, ...].
`example_features`: (dict) A dict of Tensors with the shape
[batch_size, ...].
`mode`: (`estimator.ModeKeys`) Specifies whether this is for training,
evaluation, or inference. See ModeKeys.
* Returns: The computed logits, a Tensor of shape [batch_size, 1].
transform_function: (function) A user-provided function that transforms
raw features into dense Tensors with the following signature:
* Args:
`features`: (dict) A dict of Tensors or SparseTensors containing the
raw features from an `input_fn`.
`mode`: (`estimator.ModeKeys`) Specifies whether this is for training,
evaluation, or inference. See ModeKeys.
* Returns:
`context_features`: (dict) A dict of Tensors with the shape
[batch_size, ...].
`example_features`: (dict) A dict of Tensors with the shape
[batch_size, list_size, ...].
optimizer: (`tf.Optimizer`) An `Optimizer` object for model optimization.
loss_reduction: (str) An enum string indicating the loss reduction type.
See the valid values in `tf.compat.v1.losses.Reduction`.
hparams: (dict) A dict containing model hyperparameters.
Raises:
ValueError: If `example_feature_columns` is None.
ValueError: If `scoring_function` is None.
ValueError: If neither `optimizer` nor `hparams["learning_rate"]` is
specified.
"""
if example_feature_columns is None:
raise ValueError("The `example_feature_columns` is not specified!")
if scoring_function is None:
raise ValueError("The `scoring_function` needs to be specified!")
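# Store the user-provided feature columns, functions, and hparams; they are
# checked by `_validate_function_args_and_hparams()` below.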
self._context_feature_columns = context_feature_columns
self._example_feature_columns = example_feature_columns
self._scoring_function = scoring_function
self._transform_function = transform_function
self._hparams = hparams
self._validate_function_args_and_hparams()
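# Resolve the optimizer: require either an explicit `optimizer` or
# `hparams["learning_rate"]`; if both are given, the explicit optimizer wins,
# otherwise fall back to Adagrad with the given learning rate.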
if not optimizer and not hparams.get("learning_rate"):
raise ValueError("Please specify either the `optimizer` or the "
"`learning_rate` in `hparams`!")
if optimizer and hparams.get("learning_rate"):
tf.compat.v1.logging.warning("`learning_rate` from `hparams` is ignored "
"as the `optimizer` has been specified!")
self._optimizer = (
optimizer or tf.compat.v1.train.AdagradOptimizer(
learning_rate=hparams.get("learning_rate")))
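# Sum the per-example losses by default unless a reduction is explicitly given.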
self._loss_reduction = loss_reduction or tf.compat.v1.losses.Reduction.SUM
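# --------------------------------------------------------------------------
# Usage sketch (illustrative only, not part of estimator.py). It assumes this
# constructor belongs to `tfr.estimator.EstimatorBuilder`; the feature names,
# `my_scoring_fn`, and the hparams values below are hypothetical placeholders,
# and the exact set of hparams the builder requires may differ by version.
import tensorflow as tf
import tensorflow_ranking as tfr

context_feature_columns = {
    "query_tokens": tf.feature_column.numeric_column("query_tokens", shape=(10,))
}
example_feature_columns = {
    "doc_tokens": tf.feature_column.numeric_column("doc_tokens", shape=(10,))
}

def my_scoring_fn(context_features, example_features, mode):
  """Scores one (query, document) pair; returns logits of shape [batch_size, 1]."""
  del mode  # Unused in this sketch.
  inputs = tf.concat(
      [context_features["query_tokens"], example_features["doc_tokens"]], axis=1)
  return tf.compat.v1.layers.dense(inputs, units=1)

# No `optimizer` is passed here, so the builder falls back to Adagrad with
# `hparams["learning_rate"]`, as shown in the constructor above.
estimator = tfr.estimator.EstimatorBuilder(
    context_feature_columns,
    example_feature_columns,
    scoring_function=my_scoring_fn,
    hparams={
        "learning_rate": 0.05,
        "loss": "softmax_loss",
        "checkpoint_secs": 120,
        "num_checkpoints": 100,
        "listwise_inference": False,
        "model_dir": "/tmp/ranking_model",
    }).make_estimator()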