def _binary_confusion_counts(y_pred, y_true):
    """Validate inputs and return the (TN, FP, FN, TP) counts.

    Shared preamble for the binary-only rate metrics below: coerces both
    arrays to 1d, rejects NaN/inf, and unpacks the 2x2 confusion matrix.

    NOTE(review): ``.ravel()`` unpacks into exactly four values only for a
    binary problem; multiclass input raises ``ValueError`` at the unpack —
    presumably intended, but there is no explicit check. TODO confirm.
    """
    y_pred, y_true = column_or_1d(y_pred), column_or_1d(y_true)
    assert_all_finite(y_pred)
    assert_all_finite(y_true)
    tn, fp, fn, tp = confusion_matrix(y_true, y_pred).ravel()
    return tn, fp, fn, tp


def _safe_rate(numerator, denominator, metric_name):
    """Divide ``numerator / denominator`` via sklearn's zero-division machinery.

    Returns 0.0 (with a warning) instead of raising when ``denominator`` is
    zero, matching the behavior of the other P/R/F metrics in this module.
    """
    rate = _prf_divide(
        np.array([numerator]),
        np.array([denominator]),
        metric_name,
        "predicted",
        None,             # average: only compared against "samples" in warnings
        (metric_name,),   # warn_for must be a container of metric names,
                          # not a bare string (string worked only by
                          # accidental substring matching)
        "warn",
    )
    return rate[0]


def Fall_out(y_pred, y_true):
    """Compute the fall-out (false positive rate), FP / (FP + TN).

    Binary classification only.

    Parameters
    ----------
    y_pred : array-like of shape (n_samples,)
        Predicted labels.
    y_true : array-like of shape (n_samples,)
        Ground-truth labels.

    Returns
    -------
    fall_out : float
        FP / (FP + TN); 0.0 (with a warning) when FP + TN == 0.

    Notes
    -----
    The ``(y_pred, y_true)`` argument order is reversed relative to the
    ``(y_true, y_pred)`` convention used elsewhere in this module; it is
    kept as-is for backward compatibility with existing callers.
    """
    tn, fp, _fn, _tp = _binary_confusion_counts(y_pred, y_true)
    return _safe_rate(fp, fp + tn, "Fall Out")


def Miss_rate(y_pred, y_true):
    """Compute the miss rate (false negative rate), FN / (FN + TP).

    Binary classification only.  Argument order ``(y_pred, y_true)`` is
    kept for backward compatibility (see ``Fall_out``).

    Returns
    -------
    miss_rate : float
        FN / (FN + TP); 0.0 (with a warning) when FN + TP == 0.
    """
    _tn, _fp, fn, tp = _binary_confusion_counts(y_pred, y_true)
    return _safe_rate(fn, fn + tp, "Miss Rate")


def Specificity(y_pred, y_true):
    """Compute the specificity (true negative rate), TN / (TN + FP).

    Binary classification only.  Argument order ``(y_pred, y_true)`` is
    kept for backward compatibility (see ``Fall_out``).

    Returns
    -------
    specificity : float
        TN / (TN + FP); 0.0 (with a warning) when TN + FP == 0.
    """
    tn, fp, _fn, _tp = _binary_confusion_counts(y_pred, y_true)
    return _safe_rate(tn, tn + fp, "Specificity")