tensorflow_ranking/python/keras/losses.py [322:379]:
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
  r"""Computes pairwise logistic loss between `y_true` and `y_pred`.

  For each list of scores `s` in `y_pred` and list of labels `y` in `y_true`:

  ```
  loss = sum_i sum_j I[y_i > y_j] * log(1 + exp(-(s_i - s_j)))
  ```

  Standalone usage:

  >>> y_true = [[1., 0.]]
  >>> y_pred = [[0.6, 0.8]]
  >>> loss = tfr.keras.losses.PairwiseLogisticLoss()
  >>> loss(y_true, y_pred).numpy()
  0.39906943

  >>> # Using ragged tensors
  >>> y_true = tf.ragged.constant([[1., 0.], [0., 1., 0.]])
  >>> y_pred = tf.ragged.constant([[0.6, 0.8], [0.5, 0.8, 0.4]])
  >>> loss = tfr.keras.losses.PairwiseLogisticLoss(ragged=True)
  >>> loss(y_true, y_pred).numpy()
  0.3109182

  Usage with the `compile()` API:

  ```python
  model.compile(optimizer='sgd', loss=tfr.keras.losses.PairwiseLogisticLoss())
  ```

  Definition:

  $$
  \mathcal{L}(\{y\}, \{s\}) =
  \sum_i \sum_j I[y_i > y_j] \log(1 + \exp(-(s_i - s_j)))
  $$
  """

  def __init__(self,
               reduction=tf.losses.Reduction.AUTO,
               name=None,
               lambda_weight=None,
               temperature=1.0,
               ragged=False):
    """Pairwise logistic loss.

    Args:
      reduction: (Optional) The `tf.keras.losses.Reduction` to use (see
        `tf.keras.losses.Loss`).
      name: (Optional) The name for the op.
      lambda_weight: (Optional) A lambda weight to apply to the loss. Can be
        one of `tfr.keras.losses.DCGLambdaWeight`,
        `tfr.keras.losses.NDCGLambdaWeight`, or
        `tfr.keras.losses.PrecisionLambdaWeight`.
      temperature: (Optional) The temperature to use for scaling the logits.
      ragged: (Optional) If True, this loss will accept ragged tensors. If
        False, this loss will accept dense tensors.
    """
    super().__init__(reduction, name, lambda_weight, temperature, ragged)
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
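
The standalone numbers above can be reproduced with plain TensorFlow ops.
The sketch below is illustrative, not TF-Ranking's implementation: the final
normalization (dividing the summed pairwise loss by the number of scores in
the padded batch) is an assumption inferred from the documented outputs,
e.g. log(1 + exp(0.2)) / 2 ≈ 0.39906943.

```python
import tensorflow as tf

y_true = tf.constant([[1., 0.]])
y_pred = tf.constant([[0.6, 0.8]])

# Pairwise differences, shape [batch, list_size, list_size]:
# pairwise_logits[b, i, j] = s_i - s_j.
pairwise_logits = y_pred[:, :, None] - y_pred[:, None, :]
# Indicator I[y_i > y_j], selecting pairs where item i is preferred over j.
pairwise_labels = tf.cast(y_true[:, :, None] > y_true[:, None, :],
                          tf.float32)

# log(1 + exp(-(s_i - s_j))), written as softplus(-x) for numerical
# stability.
pair_losses = pairwise_labels * tf.math.softplus(-pairwise_logits)

# Assumed normalization: average the summed pairwise loss over the number
# of scores in the batch (2 here).
loss = tf.reduce_sum(pair_losses) / tf.cast(tf.size(y_pred), tf.float32)
print(loss.numpy())  # ~0.39906943
```

The same arithmetic reproduces the ragged example: the three contributing
pairs give (0.7981 + 0.5544 + 0.5130) / 6 ≈ 0.3109182, consistent with the
ragged batch being padded to a dense [2, 3] shape before averaging.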



tensorflow_ranking/python/keras/losses.py [389:447]:
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
  r"""Computes pairwise soft zero-one loss between `y_true` and `y_pred`.

  For each list of scores `s` in `y_pred` and list of labels `y` in `y_true`:

  ```
  loss = sum_i sum_j I[y_i > y_j] * (1 - sigmoid(s_i - s_j))
  ```

  Standalone usage:

  >>> y_true = [[1., 0.]]
  >>> y_pred = [[0.6, 0.8]]
  >>> loss = tfr.keras.losses.PairwiseSoftZeroOneLoss()
  >>> loss(y_true, y_pred).numpy()
  0.274917

  >>> # Using ragged tensors
  >>> y_true = tf.ragged.constant([[1., 0.], [0., 1., 0.]])
  >>> y_pred = tf.ragged.constant([[0.6, 0.8], [0.5, 0.8, 0.4]])
  >>> loss = tfr.keras.losses.PairwiseSoftZeroOneLoss(ragged=True)
  >>> loss(y_true, y_pred).numpy()
  0.22945064

  Usage with the `compile()` API:

  ```python
  model.compile(optimizer='sgd',
                loss=tfr.keras.losses.PairwiseSoftZeroOneLoss())
  ```

  Definition:

  $$
  \mathcal{L}(\{y\}, \{s\}) =
  \sum_i \sum_j I[y_i > y_j] (1 - \text{sigmoid}(s_i - s_j))
  $$
  """

  def __init__(self,
               reduction=tf.losses.Reduction.AUTO,
               name=None,
               lambda_weight=None,
               temperature=1.0,
               ragged=False):
    """Pairwise soft zero one loss.

    Args:
      reduction: (Optional) The `tf.keras.losses.Reduction` to use (see
        `tf.keras.losses.Loss`).
      name: (Optional) The name for the op.
      lambda_weight: (Optional) A lambda weight to apply to the loss. Can be
        one of `tfr.keras.losses.DCGLambdaWeight`,
        `tfr.keras.losses.NDCGLambdaWeight`, or
        `tfr.keras.losses.PrecisionLambdaWeight`.
      temperature: (Optional) The temperature to use for scaling the logits.
      ragged: (Optional) If True, this loss will accept ragged tensors. If
        False, this loss will accept dense tensors.
    """
    super().__init__(reduction, name, lambda_weight, temperature, ragged)
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
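
The soft zero-one variant swaps the surrogate: since 1 - sigmoid(x) =
sigmoid(-x), each pair contributes sigmoid(s_j - s_i), which is bounded in
(0, 1) rather than growing linearly in the score gap. A minimal sketch under
the same assumed normalization as the logistic example:

```python
import tensorflow as tf

y_true = tf.constant([[1., 0.]])
y_pred = tf.constant([[0.6, 0.8]])

pairwise_logits = y_pred[:, :, None] - y_pred[:, None, :]  # s_i - s_j
pairwise_labels = tf.cast(y_true[:, :, None] > y_true[:, None, :],
                          tf.float32)                      # I[y_i > y_j]

# 1 - sigmoid(s_i - s_j), rewritten as sigmoid(-(s_i - s_j)).
pair_losses = pairwise_labels * tf.sigmoid(-pairwise_logits)

# Assumed normalization, as in the logistic sketch: average over the
# number of scores (2 here).
loss = tf.reduce_sum(pair_losses) / tf.cast(tf.size(y_pred), tf.float32)
print(loss.numpy())  # ~0.274917
```

Both losses also accept a `lambda_weight` (e.g.
`tfr.keras.losses.NDCGLambdaWeight()`), which rescales each pair's loss by
its impact on the corresponding ranking metric.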



