def private_prediction()

in private_prediction.py


import math

import torch

import noise  # project-local module assumed to provide `categorical` sampling


def private_prediction(predictions, epsilon=0.):
    """
    Performs private prediction for N examples given an NxK matrix `predictions`
    that contains K predictions for each example, which were produced by K
    classifiers trained on disjoint training sets.

    The parameter `epsilon` controls the privacy of the prediction: a value of
    0 (the default) gives maximum privacy by picking a class uniformly at random,
    and a value of `math.inf` performs a (non-private) majority vote over the K
    predictions.

    The private prediction algorithm used is described in Dwork & Feldman (2018).
    """
    assert predictions.dim() == 2, "predictions must be a 2D matrix"
    assert epsilon >= 0., "epsilon cannot be negative"

    # count the votes in the predictions:
    N, K = predictions.size()
    num_classes = int(predictions.max().item()) + 1
    counts = torch.zeros(N, num_classes)
    for c in range(num_classes):
        counts[:, c] = (predictions == c).sum(dim=1)

    # perform private prediction by sampling from smoothed Gibbs distribution on counts:
    if epsilon == math.inf:
        # infinite epsilon: no privacy, return the deterministic majority vote
        return counts.argmax(dim=1)
    else:
        # softmax over epsilon-scaled counts (subtract row-wise max for numerical stability):
        logits = counts.mul(epsilon)
        probabilities = logits.sub(logits.max(dim=1, keepdim=True).values).exp_()
        probabilities.div_(probabilities.sum(dim=1, keepdim=True))
        # sample one class index per example from its categorical distribution:
        return noise.categorical(probabilities)
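
A minimal usage sketch, assuming the function above is importable from the
`private_prediction` module and that `noise.categorical` draws one class index
per row of the given probability matrix; the tensors below are made up for
illustration:

import math

import torch

from private_prediction import private_prediction

# K = 3 classifiers, each trained on a disjoint shard, predict for N = 2 examples:
predictions = torch.tensor([
    [0, 0, 1],  # two classifiers vote for class 0, one for class 1
    [2, 2, 2],  # unanimous vote for class 2
])

# epsilon = 0: a uniformly random class per example (maximum privacy):
print(private_prediction(predictions, epsilon=0.))

# moderate epsilon: sample from the softmax over the epsilon-scaled vote counts:
print(private_prediction(predictions, epsilon=1.))

# epsilon = inf: plain (non-private) majority vote, here tensor([0, 2]):
print(private_prediction(predictions, epsilon=math.inf))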