in src/ppxgboost/PPBooster.py [0:0]
def client_side_multiclass_compute(predictions):
"""
Output the actual predictions using the softmax methods.
In particular, it computes the normalized exponential function, which converts a vector
of K real numbers and normalizes it into a probability distribution consisting of K
probability scores proportional to the exponentials of the input numbers, i.e. SoftMax assigns
decimal probabilities to each class in a multi-class problem.
Those decimal probabilities must add up to 1.0.
:param predictions: a list of predictions, where each element is a list that contains
the scores for each inputs - here, we allow the client to receive a list of
prediction (consistent to xgboost's prediction) for the corresponding queries.
:return: predicted classes: a list of the most probable classes the model predicts.
"""
final_output = []
# `predictions` is a list of score vectors; each score can be any real floating point number.
# e.g. predictions for k classes -- predictions = [[x_1,... x_k], [y_1,... y_k], [z_1, ..., z_k]]
for predict_i in predictions:
    # sum of the exponentials of all the predicted scores
    # e.g. e^(x_1) + ... + e^(x_k)
    sum_score = np.sum(np.exp(predict_i))
    # softmax: e^(x_i) / (e^(x_1) + ... + e^(x_k)) for each score x_i in predict_i
    output = np.exp(predict_i) / sum_score
    # report the argmax as the predicted class (most probable).
    final_output.append(np.argmax(output))
return final_output
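

# Illustrative usage sketch (an assumption, not part of the original module): three
# made-up score vectors for a 3-class model, one per query. Since softmax is monotonic,
# the reported class is simply the index of the largest raw score for each query.
# Assumes `numpy` is imported as `np` at the top of this file.
if __name__ == "__main__":
    example_predictions = [
        [0.5, 2.1, -0.3],   # query 1: class 1 has the largest score
        [1.7, 0.2, 0.4],    # query 2: class 0 has the largest score
        [-1.0, -0.5, 3.2],  # query 3: class 2 has the largest score
    ]
    # Expected output: [1, 0, 2]
    print(client_side_multiclass_compute(example_predictions))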