STY PEP8 fix E731 in linear_model/_logistic.py (#16786) · gio8tisu/scikit-learn@c10a1b2 · GitHub
[go: up one dir, main page]

Skip to content

Commit c10a1b2

Browse files
Christian Lorentzen, gio8tisu
authored and committed
STY PEP8 fix E731 in linear_model/_logistic.py (scikit-learn#16786)
1 parent 95ab337 commit c10a1b2

File tree

2 files changed

+7
-7
lines changed

2 files changed

+7
-7
lines changed

sklearn/linear_model/_logistic.py

Lines changed: 6 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -729,10 +729,10 @@ def _logistic_regression_path(X, y, pos_class=None, Cs=10, fit_intercept=True,
729729
w0 = w0.ravel()
730730
target = Y_multi
731731
if solver == 'lbfgs':
732-
func = lambda x, *args: _multinomial_loss_grad(x, *args)[0:2]
732+
def func(x, *args): return _multinomial_loss_grad(x, *args)[0:2]
733733
elif solver == 'newton-cg':
734-
func = lambda x, *args: _multinomial_loss(x, *args)[0]
735-
grad = lambda x, *args: _multinomial_loss_grad(x, *args)[1]
734+
def func(x, *args): return _multinomial_loss(x, *args)[0]
735+
def grad(x, *args): return _multinomial_loss_grad(x, *args)[1]
736736
hess = _multinomial_grad_hess
737737
warm_start_sag = {'coef': w0.T}
738738
else:
@@ -741,7 +741,7 @@ def _logistic_regression_path(X, y, pos_class=None, Cs=10, fit_intercept=True,
741741
func = _logistic_loss_and_grad
742742
elif solver == 'newton-cg':
743743
func = _logistic_loss
744-
grad = lambda x, *args: _logistic_loss_and_grad(x, *args)[1]
744+
def grad(x, *args): return _logistic_loss_and_grad(x, *args)[1]
745745
hess = _logistic_grad_hess
746746
warm_start_sag = {'coef': np.expand_dims(w0, axis=1)}
747747

@@ -1306,8 +1306,8 @@ def fit(self, X, y, sample_weight=None):
13061306
if self.penalty == 'elasticnet':
13071307
if (not isinstance(self.l1_ratio, numbers.Number) or
13081308
self.l1_ratio < 0 or self.l1_ratio > 1):
1309-
raise ValueError("l1_ratio must be between 0 and 1;"
1310-
" got (l1_ratio=%r)" % self.l1_ratio)
1309+
raise ValueError("l1_ratio must be between 0 and 1;"
1310+
" got (l1_ratio=%r)" % self.l1_ratio)
13111311
elif self.l1_ratio is not None:
13121312
warnings.warn("l1_ratio parameter is only used when penalty is "
13131313
"'elasticnet'. Got "

sklearn/utils/_testing.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -183,7 +183,7 @@ def assert_warns_message(warning_class, message, func, *args, **kw):
183183
if callable(message): # add support for certain tests
184184
check_in_message = message
185185
else:
186-
check_in_message = lambda msg: message in msg
186+
def check_in_message(msg): return message in msg
187187

188188
if check_in_message(msg):
189189
message_found = True

0 commit comments

Comments (0)