From 6c71431a0292b1968b2cdc7227f6a14adcda5244 Mon Sep 17 00:00:00 2001
From: Henri Bonamy
Date: Mon, 5 May 2025 19:45:30 +0200
Subject: [PATCH 1/2] Added reference to PrecisionRecallDisplay in
 average_precision_score's See Also

---
 sklearn/metrics/_ranking.py | 1 +
 1 file changed, 1 insertion(+)

diff --git a/sklearn/metrics/_ranking.py b/sklearn/metrics/_ranking.py
index 560fd81076914..eb54f873d79e9 100644
--- a/sklearn/metrics/_ranking.py
+++ b/sklearn/metrics/_ranking.py
@@ -183,6 +183,7 @@ def average_precision_score(
     roc_auc_score : Compute the area under the ROC curve.
     precision_recall_curve : Compute precision-recall pairs for different
         probability thresholds.
+    PrecisionRecallDisplay : Used to visualize the precision recall curve.
 
     Notes
     -----

From b0e005506d0c2f021fa1bcc1d4a480347d8fa56d Mon Sep 17 00:00:00 2001
From: Arturo Amor <86408019+ArturoAmorQ@users.noreply.github.com>
Date: Mon, 12 May 2025 11:00:35 +0200
Subject: [PATCH 2/2] Update sklearn/metrics/_ranking.py

---
 sklearn/metrics/_ranking.py | 5 ++++-
 1 file changed, 4 insertions(+), 1 deletion(-)

diff --git a/sklearn/metrics/_ranking.py b/sklearn/metrics/_ranking.py
index eb54f873d79e9..464e2d11ddf46 100644
--- a/sklearn/metrics/_ranking.py
+++ b/sklearn/metrics/_ranking.py
@@ -183,7 +183,10 @@ def average_precision_score(
     roc_auc_score : Compute the area under the ROC curve.
     precision_recall_curve : Compute precision-recall pairs for different
         probability thresholds.
-    PrecisionRecallDisplay : Used to visualize the precision recall curve.
+    PrecisionRecallDisplay.from_estimator : Plot the precision recall curve
+        using an estimator and data.
+    PrecisionRecallDisplay.from_predictions : Plot the precision recall curve
+        using true and predicted labels.
 
     Notes
     -----
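
For context (not part of the patch), a minimal sketch of how the two plotting
entry points now referenced in the See Also section relate to
average_precision_score; the dataset and estimator used here are illustrative
choices, not taken from the patch:

# Illustrative example: average_precision_score summarizes the precision-recall
# curve that PrecisionRecallDisplay can plot.
from sklearn.datasets import make_classification
from sklearn.linear_model import LogisticRegression
from sklearn.metrics import PrecisionRecallDisplay, average_precision_score
from sklearn.model_selection import train_test_split

X, y = make_classification(n_samples=1000, random_state=0)
X_train, X_test, y_train, y_test = train_test_split(X, y, random_state=0)
clf = LogisticRegression().fit(X_train, y_train)

# Scalar summary of the precision-recall curve.
y_score = clf.decision_function(X_test)
ap = average_precision_score(y_test, y_score)

# Plot the curve from an estimator and data ...
PrecisionRecallDisplay.from_estimator(clf, X_test, y_test)
# ... or from true labels and precomputed scores.
PrecisionRecallDisplay.from_predictions(y_test, y_score)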