cleaning · scikit-learn/scikit-learn@e6c776e · GitHub
[go: up one dir, main page]

Skip to content

Commit e6c776e

Browse files
committed
cleaning
1 parent 1e21983 commit e6c776e

File tree

1 file changed

+27
-35
lines changed

1 file changed

+27
-35
lines changed

sklearn/linear_model/logistic.py

Lines changed: 27 additions & 35 deletions
Original file line numberDiff line numberDiff line change
@@ -424,27 +424,22 @@ def hessp(v):
424424
return grad, hessp
425425

426426

427-
def _check_solver_option(solver, multi_class, penalty, dual,
428-
previous_default_solver='liblinear'):
427+
def _check_solver_option(solver, multi_class, penalty, dual):
429428

430-
# default values raises a future warning
429+
# Default values raises a future warning
431430
if solver == 'warn':
432-
# previous_default_solver is used since LogisticRegression and
433-
# LogisticRegressionCV don't have the same default in 0.19.
434-
solver = previous_default_solver
435-
436-
# Do not warn if the 'auto' solver selects the previous default solver
437-
if previous_default_solver != 'lbfgs':
438-
warnings.warn("Default solver will be changed to 'lbfgs' in 0.22. "
439-
"Use a specific solver to silence this warning.",
440-
FutureWarning)
431+
solver = 'liblinear'
432+
warnings.warn("Default solver will be changed to 'lbfgs' in 0.22. "
433+
"Use a specific solver to silence this warning.",
434+
FutureWarning)
441435

442436
if multi_class == 'warn':
443437
multi_class = 'ovr'
444438
warnings.warn("Default multi_class will be changed to 'multinomial' in"
445439
" 0.22. Use a specific option to silence this warning.",
446440
FutureWarning)
447441

442+
# Check the string parameters
448443
if multi_class not in ['multinomial', 'ovr']:
449444
raise ValueError("multi_class should be either multinomial or "
450445
"ovr, got %s." % multi_class)
@@ -477,7 +472,7 @@ def _check_solver_option(solver, multi_class, penalty, dual,
477472

478473
def logistic_regression_path(X, y, pos_class=None, Cs=10, fit_intercept=True,
479474
max_iter=100, tol=1e-4, verbose=0,
480-
solver='warn', coef=None,
475+
solver='lbfgs', coef=None,
481476
class_weight=None, dual=False, penalty='l2',
482477
intercept_scaling=1., multi_class='warn',
483478
random_state=None, check_input=True,
@@ -527,7 +522,7 @@ def logistic_regression_path(X, y, pos_class=None, Cs=10, fit_intercept=True,
527522
For the liblinear and lbfgs solvers set verbose to any positive
528523
number for verbosity.
529524
530-
solver : {'lbfgs', 'newton-cg', 'liblinear', 'sag', 'saga', 'auto'}
525+
solver : {'lbfgs', 'newton-cg', 'liblinear', 'sag', 'saga'}
531526
Numerical solver to use.
532527
533528
coef : array-like, shape (n_features,), default None
@@ -627,7 +622,7 @@ def logistic_regression_path(X, y, pos_class=None, Cs=10, fit_intercept=True,
627622

628623
n_samples, n_features = X.shape
629624
solver, multi_class = _check_solver_option(
630-
solver, multi_class, penalty, dual, 'lbfgs')
625+
solver, multi_class, penalty, dual)
631626

632627
classes = np.unique(y)
633628
random_state = check_random_state(random_state)
@@ -805,7 +800,7 @@ def logistic_regression_path(X, y, pos_class=None, Cs=10, fit_intercept=True,
805800
def _log_reg_scoring_path(X, y, train, test, pos_class=None, Cs=10,
806801
scoring=None, fit_intercept=False,
807802
max_iter=100, tol=1e-4, class_weight=None,
808-
verbose=0, solver='warn', penalty='l2',
803+
verbose=0, solver='lbfgs', penalty='l2',
809804
dual=False, intercept_scaling=1.,
810805
multi_class='warn', random_state=None,
811806
max_squared_sum=None, sample_weight=None):
@@ -867,7 +862,7 @@ def _log_reg_scoring_path(X, y, train, test, pos_class=None, Cs=10,
867862
For the liblinear and lbfgs solvers set verbose to any positive
868863
number for verbosity.
869864
870-
solver : {'lbfgs', 'newton-cg', 'liblinear', 'sag', 'saga', 'auto'}
865+
solver : {'lbfgs', 'newton-cg', 'liblinear', 'sag', 'saga'}
871866
Decides which solver to use.
872867
873868
penalty : str, 'l1' or 'l2'
@@ -933,7 +928,7 @@ def _log_reg_scoring_path(X, y, train, test, pos_class=None, Cs=10,
933928
"""
934929
n_samples, n_features = X.shape
935930
solver, multi_class = _check_solver_option(
936-
solver, multi_class, penalty, dual, 'lbfgs')
931+
solver, multi_class, penalty, dual)
937932

938933
X_train = X[train]
939934
X_test = X[test]
@@ -1075,8 +1070,8 @@ class LogisticRegression(BaseEstimator, LinearClassifierMixin,
10751070
instance used by `np.random`. Used when ``solver`` == 'sag' or
10761071
'liblinear'.
10771072
1078-
solver : str, {'newton-cg', 'lbfgs', 'liblinear', 'sag', 'saga', 'auto'},
1079-
default: 'liblinear'. Will be changed to 'auto' solver in 0.22.
1073+
solver : str, {'newton-cg', 'lbfgs', 'liblinear', 'sag', 'saga'},
1074+
default: 'liblinear'. Will be changed to 'lbfgs' solver in 0.22.
10801075
Algorithm to use in the optimization problem.
10811076
10821077
- For small datasets, 'liblinear' is a good choice, whereas 'sag' and
@@ -1086,8 +1081,6 @@ class LogisticRegression(BaseEstimator, LinearClassifierMixin,
10861081
schemes.
10871082
- 'newton-cg', 'lbfgs' and 'sag' only handle L2 penalty, whereas
10881083
'liblinear' and 'saga' handle L1 penalty.
3262
1089-
- 'auto' automatically chooses a solver based on the penalty
1090-
parameter.
10911084
10921085
Note that 'sag' and 'saga' fast convergence is only guaranteed on
10931086
features with approximately the same scale. You can
@@ -1097,8 +1090,8 @@ class LogisticRegression(BaseEstimator, LinearClassifierMixin,
10971090
Stochastic Average Gradient descent solver.
10981091
.. versionadded:: 0.19
10991092
SAGA solver.
1100-
.. versionadded:: 0.20
1101-
'auto' solver.
1093+
.. versionchanged:: 0.20
1094+
Default will change from 'liblinear' to 'lbfgs' in 0.22.
11021095
11031096
max_iter : int, default: 100
11041097
Useful only for the newton-cg, sag and lbfgs solvers.
@@ -1114,6 +1107,8 @@ class LogisticRegression(BaseEstimator, LinearClassifierMixin,
11141107
11151108
.. versionadded:: 0.18
11161109
Stochastic Average Gradient descent solver for 'multinomial' case.
1110+
.. versionchanged:: 0.20
1111+
Default will change from 'ovr' to 'multinomial' in 0.22.
11171112
11181113
verbose : int, default: 0
11191114
For the liblinear and lbfgs solvers set verbose to any positive
@@ -1254,8 +1249,7 @@ def fit(self, X, y, sample_weight=None):
12541249
"positive; got (tol=%r)" % self.tol)
12551250

12561251
solver, multi_class = _check_solver_option(
1257-
self.solver, self.multi_class, self.penalty, self.dual,
1258-
'liblinear')
1252+
self.solver, self.multi_class, self.penalty, self.dual)
12591253

12601254
if solver in ['newton-cg']:
12611255
_dtype = [np.float64, np.float32]
@@ -1381,7 +1375,7 @@ def predict_proba(self, X):
13811375

13821376
# This check can be removed in 0.22, changing back to self.multi_class
13831377
_, multi_class = _check_solver_option(
1384-
self.solver, self.multi_class, self.penalty, self.dual, 'lbfgs')
1378+
self.solver, self.multi_class, self.penalty, self.dual)
13851379

13861380
if multi_class == "ovr":
13871381
return super(LogisticRegression, self)._predict_proba_lr(X)
@@ -1476,8 +1470,8 @@ class LogisticRegressionCV(LogisticRegression, BaseEstimator,
14761470
that can be used, look at :mod:`sklearn.metrics`. The
14771471
default scoring option used is 'accuracy'.
14781472
1479-
solver : str, {'newton-cg', 'lbfgs', 'liblinear', 'sag', 'saga', 'auto'},
1480-
default: 'lbfgs'. Will be changed to 'auto' solver in 0.22.
1473+
solver : str, {'newton-cg', 'lbfgs', 'liblinear', 'sag', 'saga'},
1474+
default: 'lbfgs'.
14811475
Algorithm to use in the optimization problem.
14821476
14831477
- For small datasets, 'liblinear' is a good choice, whereas 'sag' and
@@ -1489,8 +1483,6 @@ class LogisticRegressionCV(LogisticRegression, BaseEstimator,
14891483
'liblinear' and 'saga' handle L1 penalty.
14901484
- 'liblinear' might be slower in LogisticRegressionCV because it does
14911485
not handle warm-starting.
1492-
- 'auto' automatically chooses a solver based on the penalty
1493-
parameter.
14941486
14951487
Note that 'sag' and 'saga' fast convergence is only guaranteed on
14961488
features with approximately the same scale. You can preprocess the data
@@ -1500,8 +1492,6 @@ class LogisticRegressionCV(LogisticRegression, BaseEstimator,
15001492
Stochastic Average Gradient descent solver.
15011493
.. versionadded:: 0.19
15021494
SAGA solver.
1503-
.. versionadded:: 0.20
1504-
'auto' solver.
15051495
15061496
tol : float, optional
15071497
Tolerance for stopping criteria.
@@ -1561,6 +1551,8 @@ class LogisticRegressionCV(LogisticRegression, BaseEstimator,
15611551
15621552
.. versionadded:: 0.18
15631553
Stochastic Average Gradient descent solver for 'multinomial' case.
1554+
.. versionchanged:: 0.20
1555+
Default will change from 'ovr' to 'multinomial' in 0.22.
15641556
15651557
random_state : int, RandomState instance or None, optional, default None
15661558
If int, random_state is the seed used by the random number generator;
@@ -1621,7 +1613,7 @@ class LogisticRegressionCV(LogisticRegression, BaseEstimator,
16211613
16221614
"""
16231615
def __init__(self, Cs=10, fit_intercept=True, cv='warn', dual=False,
1624-
penalty='l2', scoring=None, solver='warn', tol=1e-4,
1616+
penalty='l2', scoring=None, solver='lbfgs', tol=1e-4,
16251617
max_iter=100, class_weight=None, n_jobs=1, verbose=0,
16261618
refit=True, intercept_scaling=1., multi_class='warn',
16271619
random_state=None):
@@ -1663,7 +1655,7 @@ def fit(self, X, y, sample_weight=None):
16631655
self : object
16641656
"""
16651657
solver, multi_class = _check_solver_option(
1666-
self.solver, self.multi_class, self.penalty, self.dual, 'lbfgs')
1658+
self.solver, self.multi_class, self.penalty, self.dual)
16671659

16681660
if not isinstance(self.max_iter, numbers.Number) or self.max_iter < 0:
16691661
raise ValueError("Maximum number of iteration must be positive;"

0 commit comments

Comments (0)