MAINT Clean up deprecated "log" loss of SGDClassifier for 1.3 (#25865) · scikit-learn/scikit-learn@d7f62b0 · GitHub
[go: up one dir, main page]

Skip to content

Commit d7f62b0

Browse files
authored
MAINT Clean up deprecated "log" loss of SGDClassifier for 1.3 (#25865)
1 parent ab3c060 commit d7f62b0

File tree

2 files changed

+6
-46
lines changed

2 files changed

+6
-46
lines changed

sklearn/linear_model/_stochastic_gradient.py

+5-21
Original file line numberDiff line numberDiff line change
@@ -158,14 +158,6 @@ def _more_validate_params(self, for_partial_fit=False):
158158
self._get_penalty_type(self.penalty)
159159
self._get_learning_rate_type(self.learning_rate)
160160

161-
# TODO(1.3): remove "log"
162-
if self.loss == "log":
163-
warnings.warn(
164-
"The loss 'log' was deprecated in v1.1 and will be removed in version "
165-
"1.3. Use `loss='log_loss'` which is equivalent.",
166-
FutureWarning,
167-
)
168-
169161
def _get_loss_function(self, loss):
170162
"""Get concrete ``LossFunction`` object for str ``loss``."""
171163
loss_ = self.loss_functions[loss]
@@ -501,13 +493,11 @@ def _get_plain_sgd_function(input_dtype):
501493

502494
class BaseSGDClassifier(LinearClassifierMixin, BaseSGD, metaclass=ABCMeta):
503495

504-
# TODO(1.3): Remove "log""
505496
loss_functions = {
506497
"hinge": (Hinge, 1.0),
507498
"squared_hinge": (SquaredHinge, 1.0),
508499
"perceptron": (Hinge, 0.0),
509500
"log_loss": (Log,),
510-
"log": (Log,),
511501
"modified_huber": (ModifiedHuber,),
512502
"squared_error": (SquaredLoss,),
513503
"huber": (Huber, DEFAULT_EPSILON),
@@ -517,7 +507,7 @@ class BaseSGDClassifier(LinearClassifierMixin, BaseSGD, metaclass=ABCMeta):
517507

518508
_parameter_constraints: dict = {
519509
**BaseSGD._parameter_constraints,
520-
"loss": [StrOptions(set(loss_functions), deprecated={"log"})],
510+
"loss": [StrOptions(set(loss_functions))],
521511
"early_stopping": ["boolean"],
522512
"validation_fraction": [Interval(Real, 0, 1, closed="neither")],
523513
"n_iter_no_change": [Interval(Integral, 1, None, closed="left")],
@@ -950,15 +940,15 @@ class SGDClassifier(BaseSGDClassifier):
950940
951941
Parameters
952942
----------
953-
loss : {'hinge', 'log_loss', 'log', 'modified_huber', 'squared_hinge',\
943+
loss : {'hinge', 'log_loss', 'modified_huber', 'squared_hinge',\
954944
'perceptron', 'squared_error', 'huber', 'epsilon_insensitive',\
955945
'squared_epsilon_insensitive'}, default='hinge'
956946
The loss function to be used.
957947
958948
- 'hinge' gives a linear SVM.
959949
- 'log_loss' gives logistic regression, a probabilistic classifier.
960950
- 'modified_huber' is another smooth loss that brings tolerance to
 
961-
outliers as well as probability estimates.
951+
outliers as well as probability estimates.
962952
- 'squared_hinge' is like hinge but is quadratically penalized.
963953
- 'perceptron' is the linear loss used by the perceptron algorithm.
964954
- The other losses, 'squared_error', 'huber', 'epsilon_insensitive' and
@@ -969,10 +959,6 @@ class SGDClassifier(BaseSGDClassifier):
969959
More details about the losses formulas can be found in the
970960
:ref:`User Guide <sgd_mathematical_formulation>`.
971961
972-
.. deprecated:: 1.1
973-
The loss 'log' was deprecated in v1.1 and will be removed
974-
in version 1.3. Use `loss='log_loss'` which is equivalent.
975-
976962
penalty : {'l2', 'l1', 'elasticnet', None}, default='l2'
977963
The penalty (aka regularization term) to be used. Defaults to 'l2'
978964
which is the standard regularizer for linear SVM models. 'l1' and
@@ -1249,8 +1235,7 @@ def __init__(
12491235
)
12501236

12511237
def _check_proba(self):
1252-
# TODO(1.3): Remove "log"
1253-
if self.loss not in ("log_loss", "log", "modified_huber"):
1238+
if self.loss not in ("log_loss", "modified_huber"):
12541239
raise AttributeError(
12551240
"probability estimates are not available for loss=%r" % self.loss
12561241
)
@@ -1295,8 +1280,7 @@ def predict_proba(self, X):
12951280
"""
12961281
check_is_fitted(self)
12971282

1298-
# TODO(1.3): Remove "log"
1299-
if self.loss in ("log_loss", "log"):
1283+
if self.loss == "log_loss":
13001284
return self._predict_proba_lr(X)
13011285

13021286
elif self.loss == "modified_huber":

sklearn/linear_model/tests/test_sgd.py

+1-25
Original file line numberDiff line numberDiff line change
@@ -716,8 +716,7 @@ def test_sgd_predict_proba_method_access(klass):
716716
# details.
717717
for loss in linear_model.SGDClassifier.loss_functions:
718718
clf = SGDClassifier(loss=loss)
719-
# TODO(1.3): Remove "log"
720-
if loss in ("log_loss", "log", "modified_huber"):
719+
if loss in ("log_loss", "modified_huber"):
721720
assert hasattr(clf, "predict_proba")
722721
assert hasattr(clf, "predict_log_proba")
723722
else:
@@ -2060,29 +2059,6 @@ def test_SGDClassifier_fit_for_all_backends(backend):
20602059
assert_array_almost_equal(clf_sequential.coef_, clf_parallel.coef_)
20612060

20622061

2063-
# TODO(1.3): Remove
2064-
@pytest.mark.parametrize(
2065-
"old_loss, new_loss, Estimator",
2066-
[
2067-
("log", "log_loss", linear_model.SGDClassifier),
2068-
],
2069-
)
2070-
def test_loss_deprecated(old_loss, new_loss, Estimator):
2071-
2072-
# Note: class BaseSGD calls self._validate_params() in __init__, therefore
2073-
# even instantiation of class raises FutureWarning for deprecated losses.
2074-
with pytest.warns(FutureWarning, match=f"The loss '{old_loss}' was deprecated"):
2075-
est1 = Estimator(loss=old_loss, random_state=0)
2076-
est1.fit(X, Y)
2077-
2078-
est2 = Estimator(loss=new_loss, random_state=0)
2079-
est2.fit(X, Y)
2080-
if hasattr(est1, "predict_proba"):
2081-
assert_allclose(est1.predict_proba(X), est2.predict_proba(X))
2082-
else:
2083-
assert_allclose(est1.predict(X), est2.predict(X))
2084-
2085-
20862062
@pytest.mark.parametrize(
20872063
"Estimator", [linear_model.SGDClassifier, linear_model.SGDRegressor]
20882064
)

0 commit comments

Comments
 (0)
0