@@ -729,10 +729,10 @@ def _logistic_regression_path(X, y, pos_class=None, Cs=10, fit_intercept=True,
             w0 = w0.ravel()
         target = Y_multi
         if solver == 'lbfgs':
-            func = lambda x, *args: _multinomial_loss_grad(x, *args)[0:2]
+            def func(x, *args): return _multinomial_loss_grad(x, *args)[0:2]
         elif solver == 'newton-cg':
-            func = lambda x, *args: _multinomial_loss(x, *args)[0]
-            grad = lambda x, *args: _multinomial_loss_grad(x, *args)[1]
+            def func(x, *args): return _multinomial_loss(x, *args)[0]
+            def grad(x, *args): return _multinomial_loss_grad(x, *args)[1]
             hess = _multinomial_grad_hess
         warm_start_sag = {'coef': w0.T}
     else:
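Context for the change above: flake8 flags `func = lambda ...` assignments as E731, and a lambda bound to a name reports itself as `<lambda>` in tracebacks and profiler output, whereas a one-line `def` keeps its own name with identical behavior. A minimal sketch of the difference; the `loss_grad` toy function is hypothetical, standing in for `_multinomial_loss_grad`:

```python
# Why PEP 8 (E731) prefers a one-line `def` over a lambda bound to a name.
# `loss_grad` is a hypothetical stand-in for _multinomial_loss_grad.
def loss_grad(x):
    return x ** 2, 2 * x  # (loss, gradient)

func_lambda = lambda x: loss_grad(x)[0]    # E731: lambda assigned to a name

def func_def(x): return loss_grad(x)[0]    # same behavior, keeps its name

print(func_lambda.__name__)  # '<lambda>' -- opaque in tracebacks/profiles
print(func_def.__name__)     # 'func_def' -- self-describing
```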
@@ -741,7 +741,7 @@ def _logistic_regression_path(X, y, pos_class=None, Cs=10, fit_intercept=True,
             func = _logistic_loss_and_grad
         elif solver == 'newton-cg':
             func = _logistic_loss
-            grad = lambda x, *args: _logistic_loss_and_grad(x, *args)[1]
+            def grad(x, *args): return _logistic_loss_and_grad(x, *args)[1]
             hess = _logistic_grad_hess
         warm_start_sag = {'coef': np.expand_dims(w0, axis=1)}
 
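The branches above exist because the two solvers consume the objective differently: lbfgs wants a single callable returning `(loss, grad)` together, while newton-cg takes the loss, gradient, and Hessian as separate callables. A rough standalone sketch of the same dispatch pattern against `scipy.optimize.minimize`, using a toy quadratic in place of the logistic loss:

```python
# L-BFGS takes one callable returning (loss, grad) together (jac=True);
# Newton-CG takes loss and grad as separate callables. The quadratic here
# is a toy stand-in for the logistic loss.
import numpy as np
from scipy.optimize import minimize

def loss_and_grad(w):
    return np.sum(w ** 2), 2 * w          # analogous to _logistic_loss_and_grad

def loss(w): return loss_and_grad(w)[0]   # analogous to func
def grad(w): return loss_and_grad(w)[1]   # analogous to grad

w0 = np.array([3.0, -1.0])
res_lbfgs = minimize(loss_and_grad, w0, method='L-BFGS-B', jac=True)
res_ncg = minimize(loss, w0, method='Newton-CG', jac=grad)
print(res_lbfgs.x, res_ncg.x)  # both converge to ~[0, 0]
```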
@@ -1306,8 +1306,8 @@ def fit(self, X, y, sample_weight=None):
         if self.penalty == 'elasticnet':
             if (not isinstance(self.l1_ratio, numbers.Number) or
                     self.l1_ratio < 0 or self.l1_ratio > 1):
-                raise ValueError("l1_ratio must be between 0 and 1;"
-                                 " got (l1_ratio=%r)" % self.l1_ratio)
+                raise ValueError("l1_ratio must be between 0 and 1;"
+                                 " got (l1_ratio=%r)" % self.l1_ratio)
             elif self.l1_ratio is not None:
                 warnings.warn("l1_ratio parameter is only used when penalty is "
                               "'elasticnet'. Got "