def model_performance_report()

in src/smclarify/bias/report.py [0:0]


def model_performance_report(df: pd.DataFrame, label_column: LabelColumn, predicted_label_column: LabelColumn) -> Dict:
    """
    Generate a model performance report on a dataset.

    :param df: dataset as a pandas.DataFrame
    :param label_column: description of the column that holds the observed labels
    :param predicted_label_column: description of the column that holds the predicted labels
    :return: a dictionary with performance metrics for the different label values
    """
    assert label_column.positive_label_values

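    # Normalize the observed label series to a consistent data type and build a
    # boolean index marking rows whose label is one of the positive values.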
    positive_label_values: List[Any] = label_column.positive_label_values
    label_data_type, label_data_series = common.ensure_series_data_type(label_column.series, positive_label_values)

    positive_label_index, _ = _positive_label_index(
        data=label_data_series, data_type=label_data_type, positive_values=positive_label_values
    )
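    # Drop the observed label column from the working frame before computing metrics.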
    if label_column.name in df.columns:
        df = df.drop(labels=label_column.name, axis=1)

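    # Repeat the normalization for the predicted label series and build the
    # corresponding boolean index of positive predictions.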
    predicted_label_data_type, predicted_label_data_series = common.ensure_series_data_type(
        predicted_label_column.series, positive_label_values
    )
    positive_predicted_label_index = _positive_predicted_index(
        predicted_label_data=predicted_label_data_series,
        predicted_label_datatype=predicted_label_data_type,
        label_data=label_data_series,
        label_datatype=label_data_type,
        positive_label_values=positive_label_values,
    )

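    # Compute the performance metrics and the binary (positive vs. negative) confusion matrix.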
    perf_metrics: List[MetricResult] = _model_performance_metric_call_wrapper(
        df, positive_label_index, positive_predicted_label_index
    )
    binary_confusion_matrix = common.binary_confusion_matrix(df, positive_label_index, positive_predicted_label_index)
    if label_data_type == common.DataType.CATEGORICAL:
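        # For categorical labels, also compute a full multi-category confusion matrix,
        # falling back to an error entry if the computation fails.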
        try:
            multicategory_confusion_matrix = basic_stats.multicategory_confusion_matrix(
                label_data_series, predicted_label_data_series
            )
        except Exception as e:
            multicategory_confusion_matrix = {"error": {str(e): 0.0}}
            logger.warning("Multicategory Confusion Matrix failed to compute due to: %s", e)

        return ModelPerformanceReport(
            label_column.name, perf_metrics, binary_confusion_matrix, multicategory_confusion_matrix
        ).to_json()

    return ModelPerformanceReport(label_column.name, perf_metrics, binary_confusion_matrix).to_json()
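

A minimal usage sketch: this assumes the LabelColumn constructor takes a column name, a pandas Series, and the positive label values (as in the smclarify bias report API); the column names and data below are illustrative, not part of the library.

import pandas as pd
from smclarify.bias.report import LabelColumn, model_performance_report

# Toy data: one feature, observed labels in "y_true", model predictions in "y_pred" (illustrative).
df = pd.DataFrame(
    {
        "feature": [0.2, 0.7, 0.1, 0.9, 0.5, 0.3],
        "y_true": [0, 1, 0, 1, 1, 0],
        "y_pred": [0, 1, 0, 1, 0, 0],
    }
)

# Assumed constructor signature: LabelColumn(name, data, positive_label_values).
label_column = LabelColumn("y_true", df["y_true"], [1])
predicted_label_column = LabelColumn("y_pred", df["y_pred"], [1])

# Returns a JSON-serializable dict with performance metrics and confusion matrices.
report = model_performance_report(df, label_column, predicted_label_column)
print(report)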