8000 [MRG] Fix warnings during tests by vighneshbirodkar · Pull Request #5297 · scikit-learn/scikit-learn · GitHub
[go: up one dir, main page]

Skip to content

[MRG] Fix warnings during tests #5297

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Closed
wants to merge 11 commits into from
8 changes: 4 additions & 4 deletions sklearn/ensemble/tests/test_bagging.py
Original file line number Diff line number Diff line change
Expand Up @@ -114,15 +114,15 @@ def fit(self, X, y):
for f in ['predict', 'predict_proba', 'predict_log_proba', 'decision_function']:
# Trained on sparse format
sparse_classifier = BaggingClassifier(
base_estimator=CustomSVC(),
base_estimator=CustomSVC(decision_function_shape='ovr'),
random_state=1,
**params
).fit(X_train_sparse, y_train)
sparse_results = getattr(sparse_classifier, f)(X_test_sparse)

# Trained on dense format
dense_classifier = BaggingClassifier(
base_estimator=CustomSVC(),
base_estimator=CustomSVC(decision_function_shape='ovr'),
random_state=1,
**params
).fit(X_train, y_train)
Expand Down Expand Up @@ -439,7 +439,7 @@ def test_parallel_classification():
assert_array_almost_equal(y1, y3)

# decision_function
ensemble = BaggingClassifier(SVC(),
ensemble = BaggingClassifier(SVC(decision_function_shape='ovr'),
n_jobs=3,
random_state=0).fit(X_train, y_train)

Expand All @@ -449,7 +449,7 @@ def test_parallel_classification():
decisions2 = ensemble.decision_function(X_test)
assert_array_almost_equal(decisions1, decisions2)

ensemble = BaggingClassifier(SVC(),
ensemble = BaggingClassifier(SVC(decision_function_shape='ovr'),
Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Just a note for future: There seems to be a runtime warning due to division by zero.

Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

You mean due to the changes I have made?

Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Nope, not due to your changes. Some independent warning.

n_jobs=1,
random_state=0).fit(X_train, y_train)

Expand Down
3 changes: 2 additions & 1 deletion sklearn/linear_model/tests/test_least_angle.py
Original file line number Diff line number Diff line change
Expand Up @@ -333,7 +333,7 @@ def objective_function(coef):
lars_coef_ = lars.coef_
lars_obj = objective_function(lars_coef_)

coord_descent = linear_model.Lasso(alpha=alpha, tol=1e-10, normalize=False)
coord_descent = linear_model.Lasso(alpha=alpha, tol=1e-4, normalize=False)
cd_coef_ = coord_descent.fit(X, y).coef_
cd_obj = objective_function(cd_coef_)

Expand All @@ -360,6 +360,7 @@ def test_lars_n_nonzero_coefs(verbose=False):
assert_equal(len(lars.alphas_), 7)


@ignore_warnings
Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

There is a ConvergenceWarning caused by the very low value of tolerance set in L336. It can be fixed by setting tol to 1e-4.

Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Will do

def test_multitarget():
# Assure that estimators receiving multidimensional y do the right thing
X = diabetes.data
Expand Down
2 changes: 2 additions & 0 deletions sklearn/linear_model/tests/test_sgd.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,6 +14,7 @@
from sklearn.utils.testing import assert_false, assert_true
from sklearn.utils.testing import assert_equal
from sklearn.utils.testing import assert_raises_regexp
from sklearn.utils.testing import ignore_warnings

from sklearn import linear_model, datasets, metrics
from sklearn.base import clone
Expand Down Expand Up @@ -1012,6 +1013,7 @@ def test_elasticnet_convergence(self):
assert_almost_equal(cd.coef_, sgd.coef_, decimal=2,
err_msg=err_msg)

@ignore_warnings
def test_partial_fit(self):
third = X.shape[0] // 3
clf = self.factory(alpha=0.01)
Expand Down
9 changes: 6 additions & 3 deletions sklearn/svm/tests/test_sparse.py
Original file line number Diff line number Diff line change
Expand Up @@ -81,8 +81,10 @@ def test_svc():
kernels = ["linear", "poly", "rbf", "sigmoid"]
for dataset in datasets:
for kernel in kernels:
clf = svm.SVC(kernel=kernel, probability=True, random_state=0)
sp_clf = svm.SVC(kernel=kernel, probability=True, random_state=0)
clf = svm.SVC(kernel=kernel, probability=True, random_state=0,
decision_function_shape='ovo')
sp_clf = svm.SVC(kernel=kernel, probability=True, random_state=0,
decision_function_shape='ovo')
check_svm_model_equal(clf, sp_clf, *dataset)


Expand Down Expand Up @@ -148,7 +150,8 @@ def test_sparse_decision_function():
#returns the same as the one in libsvm

# multi class:
clf = svm.SVC(kernel='linear', C=0.1).fit(iris.data, iris.target)
svc = svm.SVC(kernel='linear', C=0.1, decision_function_shape='ovo')
clf = svc.fit(iris.data, iris.target)

dec = safe_sparse_dot(iris.data, clf.coef_.T) + clf.intercept_

Expand Down
3 changes: 2 additions & 1 deletion sklearn/svm/tests/test_svm.py
Original file line number Diff line number Diff line change
Expand Up @@ -347,13 +347,14 @@ def test_decision_function_shape():
assert_equal(dec.shape, (len(X_train), 10))

# check deprecation warning
clf.decision_function_shape = None
clf = svm.SVC(kernel='linear', C=0.1).fit(X_train, y_train)
msg = "change the shape of the decision function"
dec = assert_warns_message(ChangedBehaviorWarning, msg,
clf.decision_function, X_train)
assert_equal(dec.shape, (len(X_train), 10))


@ignore_warnings
Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

It shows me one more ChangedBehaviorWarning in this file.

def test_svr_decision_function():
# Test SVR's decision_function
# Sanity check, test that decision_function implemented in python
Expand Down
2 changes: 2 additions & 0 deletions sklearn/tests/test_calibration.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,6 +9,7 @@
assert_greater_equal,
assert_array_equal,
assert_raises,
ignore_warnings,
assert_warns_message)
from sklearn.datasets import make_classification, make_blobs
from sklearn.naive_bayes import MultinomialNB
Expand All @@ -23,6 +24,7 @@
from sklearn.calibration import calibration_curve


@ignore_warnings
def test_calibration():
"""Test calibration objects with isotonic and sigmoid"""
n_samples = 100
Expand Down
30 changes: 25 additions & 5 deletions sklearn/tests/test_common.py
Original file line number Diff line number Diff line change
Expand Up @@ -21,6 +21,7 @@

import sklearn
from sklearn.cluster.bicluster import BiclusterMixin
from sklearn.decomposition import ProjectedGradientNMF

from sklearn.linear_model.base import LinearClassifierMixin
from sklearn.utils.estimator_checks import (
Expand Down Expand Up @@ -66,8 +67,12 @@ def test_non_meta_estimators():
if name.startswith("_"):
continue
for check in _yield_all_checks(name, Estimator):
yield check, name, Estimator

if issubclass(Estimator, ProjectedGradientNMF):
Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

You can add a comment above on why this is done, in order to avoid confusion here in the future.

# The ProjectedGradientNMF class is deprecated
with ignore_warnings():
yield check, name, Estimator
else:
yield check, name, Estimator

def test_configure():
# Smoke test the 'configure' step of setup, this tests all the
Expand Down Expand Up @@ -180,14 +185,24 @@ def test_non_transformer_estimators_n_iter():
def test_transformer_n_iter():
transformers = all_estimators(type_filter='transformer')
for name, Estimator in transformers:
estimator = Estimator()
if issubclass(Estimator, ProjectedGradientNMF):
# The ProjectedGradientNMF class is deprecated
with ignore_warnings():
estimator = Estimator()
else:
estimator = Estimator()
# Dependent on external solvers and hence accessing the iter
# param is non-trivial.
external_solver = ['Isomap', 'KernelPCA', 'LocallyLinearEmbedding',
'RandomizedLasso', 'LogisticRegressionCV']

if hasattr(estimator, "max_iter") and name not in external_solver:
yield check_transformer_n_iter, name, estimator
if isinstance(estimator, ProjectedGradientNMF):
# The ProjectedGradientNMF class is deprecated
with ignore_warnings():
yield check_transformer_n_iter, name, estimator
else:
yield check_transformer_n_iter, name, estimator


def test_get_params_invariance():
Expand All @@ -198,4 +213,9 @@ def test_get_params_invariance():
estimators = all_estimators(include_meta_estimators=False, include_other=True)
for name, Estimator in estimators:
if hasattr(Estimator, 'get_params'):
yield check_get_params_invariance, name, Estimator
# The ProjectedGradientNMF class is deprecated
if issubclass(Estimator, ProjectedGradientNMF):
with ignore_warnings():
yield check_get_params_invariance, name, Estimator
else:
yield check_get_params_invariance, name, Estimator
2 changes: 1 addition & 1 deletion sklearn/tests/test_pipeline.py
Original file line number Diff line number Diff line change
Expand Up @@ -214,7 +214,7 @@ def test_pipeline_methods_preprocessing_svm():
n_classes = len(np.unique(y))
scaler = StandardScaler()
pca = RandomizedPCA(n_components=2, whiten=True)
clf = SVC(probability=True, random_state=0)
clf = SVC(probability=True, random_state=0, decision_function_shape='ovr')

for preprocessing in [scaler, pca]:
pipe = Pipeline([('preprocess', preprocessing), ('svc', clf)])
Expand Down
Loading
0