tensorflow_ranking/python/keras/metrics.py [642:706]:
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
  r"""Normalized discounted cumulative gain (NDCG).

  Normalized discounted cumulative gain ([Järvelin et al, 2002][jarvelin2002])
  is the normalized version of `tfr.keras.metrics.DCGMetric`.

  For each list of scores `s` in `y_pred` and list of labels `y` in `y_true`:

  ```
  NDCG(y, s) = DCG(y, s) / DCG(y, y)
  DCG(y, s) = sum_i gain(y_i) * rank_discount(rank(s_i))
  ```

  NOTE: The `gain_fn` and `rank_discount_fn` should be keras serializable.
  Please see `tfr.keras.utils.pow_minus_1` and `tfr.keras.utils.log2_inverse` as
  examples when defining user customized functions.

  Standalone usage:

  >>> y_true = [[0., 1., 1.]]
  >>> y_pred = [[3., 1., 2.]]
  >>> ndcg = tfr.keras.metrics.NDCGMetric()
  >>> ndcg(y_true, y_pred).numpy()
  0.6934264

  >>> # Using ragged tensors
  >>> y_true = tf.ragged.constant([[0., 1.], [1., 2., 0.]])
  >>> y_pred = tf.ragged.constant([[2., 1.], [2., 5., 4.]])
  >>> ndcg = tfr.keras.metrics.NDCGMetric(ragged=True)
  >>> ndcg(y_true, y_pred).numpy()
  0.7974351

  Usage with the `compile()` API:

  ```python
  model.compile(optimizer='sgd', metrics=[tfr.keras.metrics.NDCGMetric()])
  ```

  Definition:

  $$
  \text{NDCG}(\{y\}, \{s\}) =
  \frac{\text{DCG}(\{y\}, \{s\})}{\text{DCG}(\{y\}, \{y\})} \\
  \text{DCG}(\{y\}, \{s\}) =
  \sum_i \text{gain}(y_i) \cdot \text{rank\_discount}(\text{rank}(s_i))
  $$

  where $\text{rank}(s_i)$ is the rank of item $i$ after sorting by scores
  $s$ with ties broken randomly.

  References:

    - [Cumulated gain-based evaluation of IR techniques, Järvelin et al,
       2002][jarvelin2002]

  [jarvelin2002]: https://dl.acm.org/doi/10.1145/582415.582418
  """

  def __init__(self,
               name=None,
               topn=None,
               gain_fn=None,
               rank_discount_fn=None,
               dtype=None,
               ragged=False,
               **kwargs):
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -



tensorflow_ranking/python/keras/metrics.py [732:792]:
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
  r"""Discounted cumulative gain (DCG).

  Discounted cumulative gain ([Järvelin et al, 2002][jarvelin2002]).

  For each list of scores `s` in `y_pred` and list of labels `y` in `y_true`:

  ```
  DCG(y, s) = sum_i gain(y_i) * rank_discount(rank(s_i))
  ```

  NOTE: The `gain_fn` and `rank_discount_fn` should be keras serializable.
  Please see `tfr.keras.utils.pow_minus_1` and `tfr.keras.utils.log2_inverse` as
  examples when defining user customized functions.

  Standalone usage:

  >>> y_true = [[0., 1., 1.]]
  >>> y_pred = [[3., 1., 2.]]
  >>> dcg = tfr.keras.metrics.DCGMetric()
  >>> dcg(y_true, y_pred).numpy()
  1.1309297

  >>> # Using ragged tensors
  >>> y_true = tf.ragged.constant([[0., 1.], [1., 2., 0.]])
  >>> y_pred = tf.ragged.constant([[2., 1.], [2., 5., 4.]])
  >>> dcg = tfr.keras.metrics.DCGMetric(ragged=True)
  >>> dcg(y_true, y_pred).numpy()
  2.065465

  Usage with the `compile()` API:

  ```python
  model.compile(optimizer='sgd', metrics=[tfr.keras.metrics.DCGMetric()])
  ```

  Definition:

  $$
  \text{DCG}(\{y\}, \{s\}) =
  \sum_i \text{gain}(y_i) \cdot \text{rank\_discount}(\text{rank}(s_i))
  $$

  where $\text{rank}(s_i)$ is the rank of item $i$ after sorting by scores
  $s$ with ties broken randomly.

  References:

    - [Cumulated gain-based evaluation of IR techniques, Järvelin et al,
       2002][jarvelin2002]

  [jarvelin2002]: https://dl.acm.org/doi/10.1145/582415.582418
  """

  def __init__(self,
               name=None,
               topn=None,
               gain_fn=None,
               rank_discount_fn=None,
               dtype=None,
               ragged=False,
               **kwargs):
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -



