scikit-learn/scikit-learn · Commit fd9bff1

MAINT Clean deprecation of normalize in calibration_curve for 1.3 (#25833)
1 parent 7057727 commit fd9bff1

File tree: 2 files changed (+0 −45)

  sklearn/calibration.py
  sklearn/tests/test_calibration.py

sklearn/calibration.py

Lines changed: 0 additions & 25 deletions
@@ -908,7 +908,6 @@ def calibration_curve(
     y_prob,
     *,
     pos_label=None,
-    normalize="deprecated",
     n_bins=5,
     strategy="uniform",
 ):
@@ -934,17 +933,6 @@ def calibration_curve(
 
         .. versionadded:: 1.1
 
-    normalize : bool, default="deprecated"
-        Whether y_prob needs to be normalized into the [0, 1] interval, i.e.
-        is not a proper probability. If True, the smallest value in y_prob
-        is linearly mapped onto 0 and the largest one onto 1.
-
-        .. deprecated:: 1.1
-            The normalize argument is deprecated in v1.1 and will be removed in v1.3.
-            Explicitly normalizing `y_prob` will reproduce this behavior, but it is
-            recommended that a proper probability is used (i.e. a classifier's
-            `predict_proba` positive class).
-
     n_bins : int, default=5
         Number of bins to discretize the [0, 1] interval. A bigger number
         requires more data. Bins with no samples (i.e. without
@@ -992,19 +980,6 @@
     check_consistent_length(y_true, y_prob)
     pos_label = _check_pos_label_consistency(pos_label, y_true)
 
-    # TODO(1.3): Remove normalize conditional block.
-    if normalize != "deprecated":
-        warnings.warn(
-            "The normalize argument is deprecated in v1.1 and will be removed in v1.3."
-            " Explicitly normalizing y_prob will reproduce this behavior, but it is"
-            " recommended that a proper probability is used (i.e. a classifier's"
-            " `predict_proba` positive class or `decision_function` output calibrated"
-            " with `CalibratedClassifierCV`).",
-            FutureWarning,
-        )
-        if normalize:  # Normalize predicted values into interval [0, 1]
-            y_prob = (y_prob - y_prob.min()) / (y_prob.max() - y_prob.min())
-
     if y_prob.min() < 0 or y_prob.max() > 1:
         raise ValueError("y_prob has values outside [0, 1].")

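The removed conditional reproduced `normalize=True` by min-max scaling `y_prob`. A minimal migration sketch for code that relied on it (illustrative only; the data and variable names below are assumptions, not part of this commit):

    import numpy as np
    from sklearn.calibration import calibration_curve

    y_true = np.array([0, 0, 0, 1, 1, 1])
    scores = np.array([-2.0, -1.5, -0.3, 0.4, 1.1, 2.0])  # raw, unnormalized scores

    # Before 1.3 (emitted a FutureWarning):
    #   calibration_curve(y_true, scores, n_bins=2, normalize=True)
    # From 1.3 on: scale into [0, 1] explicitly, exactly as the removed block did.
    y_prob = (scores - scores.min()) / (scores.max() - scores.min())
    prob_true, prob_pred = calibration_curve(y_true, y_prob, n_bins=2)

Note that min-max scaled scores are still not proper probabilities, which is why the removed warning recommended a classifier's `predict_proba` output instead.
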
sklearn/tests/test_calibration.py

Lines changed: 0 additions & 20 deletions
@@ -401,26 +401,6 @@ def test_calibration_curve():
        calibration_curve(y_true2, y_pred2, strategy="percentile")
 
 
-# TODO(1.3): Remove this test.
-def test_calibration_curve_with_unnormalized_proba():
-    """Tests the `normalize` parameter of `calibration_curve`"""
-    y_true = np.array([0, 0, 0, 1, 1, 1])
-    y_pred = np.array([0.0, 0.1, 0.2, 0.8, 0.9, 1.0])
-
-    # Ensure `normalize` == False raises a FutureWarning.
-    with pytest.warns(FutureWarning):
-        calibration_curve(y_true, y_pred, n_bins=2, normalize=False)
-
-    # Ensure `normalize` == True raises a FutureWarning and behaves as expected.
-    with pytest.warns(FutureWarning):
-        prob_true_unnormalized, prob_pred_unnormalized = calibration_curve(
-            y_true, y_pred * 2, n_bins=2, normalize=True
-        )
-    prob_true, prob_pred = calibration_curve(y_true, y_pred, n_bins=2)
-    assert_almost_equal(prob_true, prob_true_unnormalized)
-    assert_almost_equal(prob_pred, prob_pred_unnormalized)
-
-
 @pytest.mark.parametrize("ensemble", [True, False])
 def test_calibration_nan_imputer(ensemble):
     """Test that calibration can accept nan"""

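With the `normalize` test gone, the supported path is the one the removed warning pointed to: pass proper probabilities, either a classifier's `predict_proba` positive class or `decision_function` output calibrated with `CalibratedClassifierCV`. A sketch of that recommendation (the estimator choice and synthetic data are illustrative assumptions):

    from sklearn.calibration import CalibratedClassifierCV, calibration_curve
    from sklearn.datasets import make_classification
    from sklearn.model_selection import train_test_split
    from sklearn.svm import LinearSVC

    X, y = make_classification(n_samples=400, random_state=0)
    X_train, X_test, y_train, y_test = train_test_split(X, y, random_state=0)

    # LinearSVC only exposes decision_function; CalibratedClassifierCV maps
    # those scores to probabilities (sigmoid calibration by default), so
    # predict_proba becomes available.
    clf = CalibratedClassifierCV(LinearSVC(), cv=3).fit(X_train, y_train)
    y_prob = clf.predict_proba(X_test)[:, 1]  # positive-class probabilities

    prob_true, prob_pred = calibration_curve(y_test, y_prob, n_bins=5)
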
0 commit comments
