Make convergence warnings appear when verbose = 0 in linear models. · AlexandreSev/scikit-learn@5d10ba5 · GitHub
[go: up one dir, main page]

Skip to content

Commit 5d10ba5

Browse files
committed
Make convergence warnings appear when verbose = 0 in linear models.
For lbfgs and liblinears solvers, the convergence warnings appeared only when verbose was greater than 0, whereas they appeared with verbose = 0 with other solvers. Create test to check the convergence warning in logistic regression and in linear svm. Update `test_search` to ignore this convergence warning. Fixes scikit-learn#10866
1 parent 4989a95 commit 5d10ba5

File tree

5 files changed

+19
-14
lines changed

5 files changed

+19
-14
lines changed

sklearn/linear_model/logistic.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -707,7 +707,7 @@ def logistic_regression_path(X, y, pos_class=None, Cs=10, fit_intercept=True,
707707
func, w0, fprime=None,
708708
args=(X, target, 1. / C, sample_weight),
709709
iprint=(verbose > 0) - 1, pgtol=tol, maxiter=max_iter)
710-
if info["warnflag"] == 1 and verbose > 0:
710+
if info["warnflag"] == 1:
711711
warnings.warn("lbfgs failed to converge. Increase the number "
712712
"of iterations.", ConvergenceWarning)
713713
# In scipy <= 1.0.0, nit may exceed maxiter.

sklearn/linear_model/tests/test_logistic.py

Lines changed: 1 addition & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -800,15 +800,6 @@ def test_logistic_regression_class_weights():
800800
assert_array_almost_equal(clf1.coef_, clf2.coef_, decimal=6)
801801

802802

803-
def test_logistic_regression_convergence_warnings():
804-
# Test that warnings are raised if model does not converge
805-
806-
X, y = make_classification(n_samples=20, n_features=20, random_state=0)
807-
clf_lib = LogisticRegression(solver='liblinear', max_iter=2, verbose=1)
808-
assert_warns(ConvergenceWarning, clf_lib.fit, X, y)
809-
assert_equal(clf_lib.n_iter_, 2)
810-
811-
812803
def test_logistic_regression_multinomial():
813804
# Tests for the multinomial option in logistic regression
814805

@@ -1033,7 +1024,6 @@ def test_logreg_predict_proba_multinomial():
10331024
assert_greater(clf_wrong_loss, clf_multi_loss)
10341025

10351026

1036-
@ignore_warnings
10371027
def test_max_iter():
10381028
# Test that the maximum number of iteration is reached
10391029
X, y_bin = iris.data, iris.target.copy()
@@ -1049,7 +1039,7 @@ def test_max_iter():
10491039
lr = LogisticRegression(max_iter=max_iter, tol=1e-15,
10501040
multi_class=multi_class,
10511041
random_state=0, solver=solver)
1052-
lr.fit(X, y_bin)
1042+
assert_warns(ConvergenceWarning, lr.fit, X, y_bin)
10531043
assert_equal(lr.n_iter_[0], max_iter)
10541044

10551045

sklearn/model_selection/tests/test_search.py

Lines changed: 6 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -33,6 +33,7 @@
3333
from sklearn.base import BaseEstimator
3434
from sklearn.base import clone
3535
from sklearn.exceptions import NotFittedError
36+
from sklearn.exceptions import ConvergenceWarning
3637
from sklearn.datasets import make_classification
3738
from sklearn.datasets import make_blobs
3839
from sklearn.datasets import make_multilabel_classification
@@ -350,7 +351,11 @@ def test_return_train_score_warn():
350351
for estimator in estimators:
351352
for val in [True, False, 'warn']:
352353
estimator.set_params(return_train_score=val)
353-
result[val] = assert_no_warnings(estimator.fit, X, y).cv_results_
354+
result[val] = assert_no_warnings(
355+
ignore_warnings(estimator.fit, category=ConvergenceWarning),
356+
X,
357+
y
358+
).cv_results_
354359

355360
train_keys = ['split0_train_score', 'split1_train_score',
356361
'split2_train_score', 'mean_train_score', 'std_train_score']

sklearn/svm/base.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -907,7 +907,7 @@ def _fit_liblinear(X, y, C, fit_intercept, intercept_scaling, class_weight,
907907
# on 32-bit platforms, we can't get to the UINT_MAX limit that
908908
# srand supports
909909
n_iter_ = max(n_iter_)
910-
if n_iter_ >= max_iter and verbose > 0:
910+
if n_iter_ >= max_iter:
911911
warnings.warn("Liblinear failed to converge, increase "
912912
"the number of iterations.", ConvergenceWarning)
913913

sklearn/svm/tests/test_svm.py

Lines changed: 10 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -849,6 +849,16 @@ def test_timeout():
849849
assert_warns(ConvergenceWarning, a.fit, X, Y)
850850

851851

852+
def test_convergence_warning_linear_svm():
853+
# Test if a convergence warning is raised when verbose = 0.
854+
855+
linear_svc = svm.LinearSVC(random_state=0, max_iter=1)
856+
assert_warns(ConvergenceWarning, linear_svc.fit, X, Y)
857+
858+
linear_svr = svm.LinearSVR(random_state=0, max_iter=1)
859+
assert_warns(ConvergenceWarning, linear_svr.fit, iris.data, iris.target)
860+
861+
852862
def test_unfitted():
853863
X = "foo!" # input validation not required when SVM not fitted
854864

0 commit comments

Comments
 (0)
0