def average_precision()

in next_steps/data_science/offline_performance_evaluation/metrics.py


import numpy as np


def average_precision(r):
    """Score is average precision (area under PR curve)

    Relevance is binary (nonzero is relevant).

    >>> r = [1, 1, 0, 1, 0, 1, 0, 0, 0, 1]
    >>> delta_r = 1. / sum(r)
    >>> sum([sum(r[:x + 1]) / (x + 1.) * delta_r for x, y in enumerate(r) if y])
    0.7833333333333333
    >>> average_precision(r)
    0.7833333333333333

    Args:
        r: Relevance scores (list or numpy array) in rank order
            (first element is the first item)

    Returns:
        Average precision
    """
    # Binarize: any nonzero score counts as relevant.
    r = np.asarray(r) != 0
    # Precision at each rank k where the k-th item is relevant.
    out = [precision_at_k(r, k + 1) for k in range(r.size) if r[k]]
    if not out:
        return 0.
    return np.mean(out)
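
average_precision delegates to precision_at_k, which metrics.py presumably defines elsewhere in the module. Below is a minimal sketch consistent with the call above; the body is assumed from the usage, not taken from the source:


def precision_at_k(r, k):
    """Precision@k: fraction of the top-k ranked items that are relevant.

    Sketch only; assumed from how average_precision calls it.

    >>> precision_at_k([1, 1, 0, 1, 0, 1, 0, 0, 0, 1], 4)
    0.75
    """
    assert k >= 1
    # Binarize and truncate to the top-k ranked items.
    r = np.asarray(r)[:k] != 0
    if r.size != k:
        raise ValueError('Relevance score length < k')
    return np.mean(r)


With a definition like this, the doctest in average_precision works out: the precisions at the relevant ranks 1, 2, 4, 6, and 10 are 1, 1, 0.75, 2/3, and 0.5, whose mean is 0.7833...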