@@ -156,13 +156,12 @@ def test_auc():
 
 
 def test_auc_duplicate_values():
-    """ Test Area Under Curve (AUC) computation with duplicate values
+    # Test Area Under Curve (AUC) computation with duplicate values
 
-    auc() was previously sorting the x and y arrays according to the indices
-    from numpy.argsort(x), which was reordering the tied 0's in this example
-    and resulting in an incorrect area computation. This test detects the
-    error.
-    """
+    # auc() was previously sorting the x and y arrays according to the indices
+    # from numpy.argsort(x), which was reordering the tied 0's in this example
+    # and resulting in an incorrect area computation. This test detects the
+    # error.
     x = [0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.5, 1.]
     y = [0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9,
          1., 1., 1., 1., 1., 1., 1., 1.]
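
To make the failure mode concrete, here is a minimal standalone sketch (an illustration of the bug described in the comment, not scikit-learn's former code): with tied x values, the order of the matching y values changes the trapezoidal area, so a sort of x that shuffles the ties silently changes the result.

import numpy as np

# Three points, two of them tied at x = 0.
x = np.array([0., 0., 1.])
y = np.array([0.1, 1., 1.])

# Original order: the segment (0, 0.1) -> (0, 1.0) has zero width, then
# (0, 1.0) -> (1, 1.0) contributes a rectangle of area 1.0.
print(np.trapz(y, x))             # 1.0

# Ties swapped: the wide segment becomes (0, 0.1) -> (1, 1.0), a trapezoid
# of area (0.1 + 1.0) / 2 = 0.55, although the point set is unchanged.
print(np.trapz(y[[1, 0, 2]], x))  # 0.55
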
@@ -201,6 +200,17 @@ def test_precision_recall_f1_score_binary():
     assert_array_almost_equal(fs, 0.74, 2)
 
 
+def test_average_precision_score_duplicate_values():
+    # Duplicate values with precision-recall require a different
+    # processing than when computing the AUC of a ROC, because the
+    # precision-recall curve is a decreasing curve.
+    # The following situation corresponds to a perfect
+    # test statistic, the average_precision_score should be 1.
+    y_true = [0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1]
+    y_score = [0, .1, .1, .4, .5, .6, .6, .9, .9, 1, 1]
+    assert_equal(average_precision_score(y_true, y_score), 1)
+
+
 def test_precision_recall_fscore_support_errors():
     y_true, y_pred, _ = make_prediction(binary=True)
 
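
The reasoning behind the expected value, sketched outside the test suite (assuming only the public sklearn.metrics API): the scores rank every positive above every negative, so each recall level is reached while precision is still 1, and the area under the precision-recall curve is exactly 1.

import numpy as np
from sklearn.metrics import average_precision_score

# Every negative scores at most .4 and every positive at least .5: a
# perfectly separating statistic, so the average precision should be 1.
y_true = np.array([0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1])
y_score = np.array([0., .1, .1, .4, .5, .6, .6, .9, .9, 1., 1.])

print(average_precision_score(y_true, y_score))  # 1.0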