diff --git a/sklearn/metrics/_classification.py b/sklearn/metrics/_classification.py
index ac726dd816a13..f620e6631cee9 100644
--- a/sklearn/metrics/_classification.py
+++ b/sklearn/metrics/_classification.py
@@ -2688,3 +2688,70 @@ def brier_score_loss(y_true, y_prob, *, sample_weight=None, pos_label=None):
             raise
     y_true = np.array(y_true == pos_label, int)
     return np.average((y_true - y_prob) ** 2, weights=sample_weight)
+
+
+def fall_out(y_true, y_pred):
+    """Compute the fall-out (false positive rate) for binary classification.
+
+    Fall-out is FP / (FP + TN): the fraction of truly negative samples
+    that are incorrectly predicted as positive.
+    """
+    y_true, y_pred = column_or_1d(y_true), column_or_1d(y_pred)
+    assert_all_finite(y_true)
+    assert_all_finite(y_pred)
+    # Binary problems only: ravel() of the 2x2 confusion matrix.
+    tn, fp, fn, tp = confusion_matrix(y_true, y_pred).ravel()
+    result = _prf_divide(
+        np.array([fp]),
+        np.array([fp + tn]),
+        metric="fall-out",
+        modifier="true",
+        average=None,
+        warn_for=("fall-out",),
+        zero_division="warn",
+    )
+    return result[0]
+
+
+def miss_rate(y_true, y_pred):
+    """Compute the miss rate (false negative rate) for binary classification.
+
+    Miss rate is FN / (FN + TP): the fraction of truly positive samples
+    that are incorrectly predicted as negative.
+    """
+    y_true, y_pred = column_or_1d(y_true), column_or_1d(y_pred)
+    assert_all_finite(y_true)
+    assert_all_finite(y_pred)
+    tn, fp, fn, tp = confusion_matrix(y_true, y_pred).ravel()
+    result = _prf_divide(
+        np.array([fn]),
+        np.array([fn + tp]),
+        metric="miss rate",
+        modifier="true",
+        average=None,
+        warn_for=("miss rate",),
+        zero_division="warn",
+    )
+    return result[0]
+
+
+def specificity(y_true, y_pred):
+    """Compute the specificity (true negative rate) for binary classification.
+
+    Specificity is TN / (TN + FP): the fraction of truly negative samples
+    that are correctly predicted as negative. It equals 1 - fall_out.
+    """
+    y_true, y_pred = column_or_1d(y_true), column_or_1d(y_pred)
+    assert_all_finite(y_true)
+    assert_all_finite(y_pred)
+    tn, fp, fn, tp = confusion_matrix(y_true, y_pred).ravel()
+    result = _prf_divide(
+        np.array([tn]),
+        np.array([tn + fp]),
+        metric="specificity",
+        modifier="true",
+        average=None,
+        warn_for=("specificity",),
+        zero_division="warn",
+    )
+    return result[0]
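
For reference, a minimal usage sketch (not part of the patch). It assumes the patched file is installed; since the diff does not re-export these helpers from `sklearn.metrics`, they are imported from the private `sklearn.metrics._classification` module here:

import numpy as np
from sklearn.metrics._classification import fall_out, miss_rate, specificity

# Toy binary labels with confusion counts TN=2, FP=1, FN=1, TP=2.
y_true = np.array([0, 0, 0, 1, 1, 1])
y_pred = np.array([0, 1, 0, 1, 1, 0])

print(fall_out(y_true, y_pred))     # FP / (FP + TN) = 1/3
print(miss_rate(y_true, y_pred))    # FN / (FN + TP) = 1/3
print(specificity(y_true, y_pred))  # TN / (TN + FP) = 2/3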