@@ -158,14 +158,6 @@ def _more_validate_params(self, for_partial_fit=False):
         self._get_penalty_type(self.penalty)
         self._get_learning_rate_type(self.learning_rate)
 
-        # TODO(1.3): remove "log"
-        if self.loss == "log":
-            warnings.warn(
-                "The loss 'log' was deprecated in v1.1 and will be removed in version "
-                "1.3. Use `loss='log_loss'` which is equivalent.",
-                FutureWarning,
-            )
-
     def _get_loss_function(self, loss):
         """Get concrete ``LossFunction`` object for str ``loss``."""
         loss_ = self.loss_functions[loss]
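A quick migration sketch for downstream code (illustrative data and settings, not part of this diff): with the warning block above removed, `loss="log"` is no longer soft-deprecated here and instead fails scikit-learn's parameter validation at fit time, so callers must switch to the equivalent `loss="log_loss"`.

```python
from sklearn.linear_model import SGDClassifier

# Toy data, purely for illustration.
X = [[0.0, 0.0], [1.0, 1.0], [2.0, 2.0], [3.0, 3.0]]
y = [0, 0, 1, 1]

# Before this change: SGDClassifier(loss="log") trained but emitted a
# FutureWarning. After it: the same call raises InvalidParameterError
# during fit, because "log" is no longer an accepted option.
clf = SGDClassifier(loss="log_loss", random_state=0).fit(X, y)
print(clf.predict([[1.5, 1.5]]))
```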
@@ -501,13 +493,11 @@ def _get_plain_sgd_function(input_dtype):
 
 class BaseSGDClassifier(LinearClassifierMixin, BaseSGD, metaclass=ABCMeta):
 
-    # TODO(1.3): Remove "log""
     loss_functions = {
         "hinge": (Hinge, 1.0),
         "squared_hinge": (SquaredHinge, 1.0),
         "perceptron": (Hinge, 0.0),
         "log_loss": (Log,),
-        "log": (Log,),
         "modified_huber": (ModifiedHuber,),
         "squared_error": (SquaredLoss,),
         "huber": (Huber, DEFAULT_EPSILON),
@@ -517,7 +507,7 @@ class BaseSGDClassifier(LinearClassifierMixin, BaseSGD, metaclass=ABCMeta):
 
     _parameter_constraints: dict = {
         **BaseSGD._parameter_constraints,
-        "loss": [StrOptions(set(loss_functions), deprecated={"log"})],
+        "loss": [StrOptions(set(loss_functions))],
         "early_stopping": ["boolean"],
         "validation_fraction": [Interval(Real, 0, 1, closed="neither")],
         "n_iter_no_change": [Interval(Integral, 1, None, closed="left")],
@@ -950,15 +940,15 @@ class SGDClassifier(BaseSGDClassifier):
 
     Parameters
     ----------
-    loss : {'hinge', 'log_loss', 'log', 'modified_huber', 'squared_hinge',\
+    loss : {'hinge', 'log_loss', 'modified_huber', 'squared_hinge',\
             'perceptron', 'squared_error', 'huber', 'epsilon_insensitive',\
             'squared_epsilon_insensitive'}, default='hinge'
         The loss function to be used.
 
         - 'hinge' gives a linear SVM.
         - 'log_loss' gives logistic regression, a probabilistic classifier.
         - 'modified_huber' is another smooth loss that brings tolerance to
-          outliers as well as probability estimates.
+          outliers as well as probability estimates.
         - 'squared_hinge' is like hinge but is quadratically penalized.
         - 'perceptron' is the linear loss used by the perceptron algorithm.
         - The other losses, 'squared_error', 'huber', 'epsilon_insensitive' and
@@ -969,10 +959,6 @@ class SGDClassifier(BaseSGDClassifier):
         More details about the losses formulas can be found in the
         :ref:`User Guide <sgd_mathematical_formulation>`.
 
-        .. deprecated:: 1.1
-            The loss 'log' was deprecated in v1.1 and will be removed
-            in version 1.3. Use `loss='log_loss'` which is equivalent.
-
     penalty : {'l2', 'l1', 'elasticnet', None}, default='l2'
         The penalty (aka regularization term) to be used. Defaults to 'l2'
         which is the standard regularizer for linear SVM models. 'l1' and
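As the updated docstring notes, `log_loss` and `modified_huber` are the two losses that yield probability estimates, while `hinge` exposes only `decision_function`. A small usage sketch with made-up data:

```python
from sklearn.linear_model import SGDClassifier

X = [[-2.0], [-1.0], [1.0], [2.0]]
y = [0, 0, 1, 1]

proba_clf = SGDClassifier(loss="log_loss", random_state=0).fit(X, y)
print(proba_clf.predict_proba([[0.5]]))  # shape (1, 2): P(y=0), P(y=1)

svm_clf = SGDClassifier(loss="hinge", random_state=0).fit(X, y)
print(svm_clf.decision_function([[0.5]]))  # signed score, no probabilities
```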
@@ -1249,8 +1235,7 @@ def __init__(
         )
 
     def _check_proba(self):
-        # TODO(1.3): Remove "log"
-        if self.loss not in ("log_loss", "log", "modified_huber"):
+        if self.loss not in ("log_loss", "modified_huber"):
             raise AttributeError(
                 "probability estimates are not available for loss=%r" % self.loss
             )
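This check gates attribute access to `predict_proba` (through the `available_if` mechanism elsewhere in the file, which this diff does not touch), so the user-visible effect looks roughly like this:

```python
from sklearn.linear_model import SGDClassifier

clf = SGDClassifier(loss="hinge").fit([[0.0], [1.0]], [0, 1])

# Merely accessing the attribute triggers _check_proba.
try:
    clf.predict_proba
except AttributeError as exc:
    print(exc)  # probability estimates are not available for loss='hinge'

print(hasattr(clf, "predict_proba"))  # False for non-probabilistic losses
```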
@@ -1295,8 +1280,7 @@ def predict_proba(self, X):
         """
         check_is_fitted(self)
 
-        # TODO(1.3): Remove "log"
-        if self.loss in ("log_loss", "log"):
+        if self.loss == "log_loss":
             return self._predict_proba_lr(X)
 
         elif self.loss == "modified_huber":
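For the `log_loss` branch, `_predict_proba_lr` (inherited from `LinearClassifierMixin`) squashes decision values through the logistic sigmoid, with one-vs-rest normalization in the multiclass case. A standalone sketch of the binary case, written from that description rather than copied from the library:

```python
import numpy as np

def binary_proba_from_scores(scores):
    """Map decision_function outputs to [P(class 0), P(class 1)] via the
    logistic sigmoid, mirroring the binary behavior of _predict_proba_lr."""
    p = 1.0 / (1.0 + np.exp(-np.asarray(scores, dtype=float)))
    return np.column_stack([1.0 - p, p])

print(binary_proba_from_scores([-2.0, 0.0, 2.0]))
```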