@@ -437,13 +437,13 @@ def _check_solver(solver, penalty, dual):
437437 raise ValueError ("Logistic Regression supports only solvers in %s, got"
438438 " %s." % (all_solvers , solver ))
439439
440- all_penalties = ['l1' , 'l2' , 'elasticnet' ]
440+ all_penalties = ['l1' , 'l2' , 'elasticnet' , 'none' ]
441441 if penalty not in all_penalties :
442442 raise ValueError ("Logistic Regression supports only penalties in %s,"
443443 " got %s." % (all_penalties , penalty ))
444444
445- if solver not in ['liblinear' , 'saga' ] and penalty != 'l2' :
446- raise ValueError ("Solver %s supports only l2 penalties, "
445+ if solver not in ['liblinear' , 'saga' ] and penalty not in ( 'l2' , 'none' ) :
446+ raise ValueError ("Solver %s supports only 'l2' or 'none' penalties, "
447447 "got %s penalty." % (solver , penalty ))
448448 if solver != 'liblinear' and dual :
449449 raise ValueError ("Solver %s supports only "
@@ -452,6 +452,12 @@ def _check_solver(solver, penalty, dual):
452452 if penalty == 'elasticnet' and solver != 'saga' :
453453 raise ValueError ("Only 'saga' solver supports elasticnet penalty,"
454454 " got solver={}." .format (solver ))
455+
456+ if solver == 'liblinear' and penalty == 'none' :
457+ raise ValueError (
458+ "penalty='none' is not supported for the liblinear solver"
459+ )
460+
455461 return solver
456462
457463
@@ -1205,24 +1211,27 @@ class LogisticRegression(BaseEstimator, LinearClassifierMixin,
12051211 'sag', 'saga' and 'newton-cg' solvers.)
12061212
12071213 This class implements regularized logistic regression using the
1208- 'liblinear' library, 'newton-cg', 'sag', 'saga' and 'lbfgs' solvers. It can
1209- handle both dense and sparse input. Use C-ordered arrays or CSR matrices
1210- containing 64-bit floats for optimal performance; any other input format
1211- will be converted (and copied).
1214+ 'liblinear' library, 'newton-cg', 'sag', 'saga' and 'lbfgs' solvers. **Note
1215+ that regularization is applied by default**. It can handle both dense
1216+ and sparse input. Use C-ordered arrays or CSR matrices containing 64-bit
1217+ floats for optimal performance; any other input format will be converted
1218+ (and copied).
12121219
12131220 The 'newton-cg', 'sag', and 'lbfgs' solvers support only L2 regularization
1214- with primal formulation. The 'liblinear' solver supports both L1 and L2
1215- regularization, with a dual formulation only for the L2 penalty. The
1216- Elastic-Net regularization is only supported by the 'saga' solver.
1221+ with primal formulation, or no regularization. The 'liblinear' solver
1222+ supports both L1 and L2 regularization, with a dual formulation only for
1223+ the L2 penalty. The Elastic-Net regularization is only supported by the
1224+ 'saga' solver.
12171225
12181226 Read more in the :ref:`User Guide <logistic_regression>`.
12191227
12201228 Parameters
12211229 ----------
1222- penalty : str, 'l1', 'l2', or 'elasticnet ', optional (default='l2')
1230+ penalty : str, 'l1', 'l2', 'elasticnet' or 'none ', optional (default='l2')
12231231 Used to specify the norm used in the penalization. The 'newton-cg',
12241232 'sag' and 'lbfgs' solvers support only l2 penalties. 'elasticnet' is
1225- only supported by the 'saga' solver.
1233+ only supported by the 'saga' solver. If 'none' (not supported by the
1234+ liblinear solver), no regularization is applied.
12261235
12271236 .. versionadded:: 0.19
12281237 l1 penalty with SAGA solver (allowing 'multinomial' + L1)
@@ -1289,8 +1298,10 @@ class LogisticRegression(BaseEstimator, LinearClassifierMixin,
12891298 - For multiclass problems, only 'newton-cg', 'sag', 'saga' and 'lbfgs'
12901299 handle multinomial loss; 'liblinear' is limited to one-versus-rest
12911300 schemes.
1292- - 'newton-cg', 'lbfgs' and 'sag' only handle L2 penalty, whereas
1293- 'liblinear' and 'saga' handle L1 penalty.
1301+ - 'newton-cg', 'lbfgs', 'sag' and 'saga' handle L2 or no penalty
1302+ - 'liblinear' and 'saga' also handle L1 penalty
1303+ - 'saga' also supports 'elasticnet' penalty
1304+ - 'liblinear' does not handle no penalty
12941305
12951306 Note that 'sag' and 'saga' fast convergence is only guaranteed on
12961307 features with approximately the same scale. You can
@@ -1491,6 +1502,18 @@ def fit(self, X, y, sample_weight=None):
14911502 warnings .warn ("l1_ratio parameter is only used when penalty is "
14921503 "'elasticnet'. Got "
14931504 "(penalty={})" .format (self .penalty ))
1505+ if self .penalty == 'none' :
1506+ if self .C != 1.0 : # default values
1507+ warnings .warn (
1508+ "Setting penalty='none' will ignore the C and l1_ratio "
1509+ "parameters"
1510+ )
1511+ # Note that check for l1_ratio is done right above
1512+ C_ = np .inf
1513+ penalty = 'l2'
1514+ else :
1515+ C_ = self .C
1516+ penalty = self .penalty
14941517 if not isinstance (self .max_iter , numbers .Number ) or self .max_iter < 0 :
14951518 raise ValueError ("Maximum number of iteration must be positive;"
14961519 " got (max_iter=%r)" % self .max_iter )
@@ -1570,13 +1593,13 @@ def fit(self, X, y, sample_weight=None):
15701593 prefer = 'processes'
15711594 fold_coefs_ = Parallel (n_jobs = self .n_jobs , verbose = self .verbose ,
15721595 ** _joblib_parallel_args (prefer = prefer ))(
1573- path_func (X , y , pos_class = class_ , Cs = [self . C ],
1596+ path_func (X , y , pos_class = class_ , Cs = [C_ ],
15741597 l1_ratio = self .l1_ratio , fit_intercept = self .fit_intercept ,
15751598 tol = self .tol , verbose = self .verbose , solver = solver ,
15761599 multi_class = multi_class , max_iter = self .max_iter ,
15771600 class_weight = self .class_weight , check_input = False ,
15781601 random_state = self .random_state , coef = warm_start_coef_ ,
1579- penalty = self . penalty , max_squared_sum = max_squared_sum ,
1602+ penalty = penalty , max_squared_sum = max_squared_sum ,
15801603 sample_weight = sample_weight )
15811604 for class_ , warm_start_coef_ in zip (classes_ , warm_start_coef ))
15821605
@@ -1968,6 +1991,12 @@ def fit(self, X, y, sample_weight=None):
19681991
19691992 l1_ratios_ = [None ]
19701993
1994+ if self .penalty == 'none' :
1995+ raise ValueError (
1996+ "penalty='none' is not useful and not supported by "
1997+ "LogisticRegressionCV."
1998+ )
1999+
19712000 X , y = check_X_y (X , y , accept_sparse = 'csr' , dtype = np .float64 ,
19722001 order = "C" ,
19732002 accept_large_sparse = solver != 'liblinear' )
0 commit comments