[MRG] Fix warnings during tests #5297
Changes from all commits: 92e65cf, 3f85b0e, 54823f0, 8d28c32, 8f1ec1c, 8d25064, c70935e, 8c8b88c, 47ad2c9, e2befde, ad3fb71
Changes to the LARS/Lasso tests:

@@ -333,7 +333,7 @@ def objective_function(coef):
     lars_coef_ = lars.coef_
     lars_obj = objective_function(lars_coef_)

-    coord_descent = linear_model.Lasso(alpha=alpha, tol=1e-10, normalize=False)
+    coord_descent = linear_model.Lasso(alpha=alpha, tol=1e-4, normalize=False)
     cd_coef_ = coord_descent.fit(X, y).coef_
     cd_obj = objective_function(cd_coef_)

@@ -360,6 +360,7 @@ def test_lars_n_nonzero_coefs(verbose=False):
     assert_equal(len(lars.alphas_), 7)


+@ignore_warnings
 def test_multitarget():
     # Assure that estimators receiving multidimensional y do the right thing
     X = diabetes.data

Review comment: There is a ConvergenceWarning caused by the very low tolerance value set in L336; it can be fixed by setting tol to 1e-4.

Reply: Will do.
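As background for that comment, here is a minimal sketch of how a very tight tol can trigger a ConvergenceWarning on a Lasso fit, and how a looser tolerance avoids it. The data, alpha, and max_iter values below are made up for illustration and are not taken from the test; the ConvergenceWarning import path is the one used by current scikit-learn releases.

import warnings

import numpy as np
from sklearn import linear_model
from sklearn.exceptions import ConvergenceWarning

rng = np.random.RandomState(0)
X = rng.randn(50, 200)   # more features than samples, so convergence is slow
y = rng.randn(50)

with warnings.catch_warnings(record=True) as caught:
    warnings.simplefilter("always")
    # An extremely tight tolerance combined with a small iteration budget
    # typically stops before convergence and emits a ConvergenceWarning.
    linear_model.Lasso(alpha=0.01, tol=1e-10, max_iter=10).fit(X, y)
print(any(issubclass(w.category, ConvergenceWarning) for w in caught))  # True

# A looser tolerance such as tol=1e-4 normally converges and stays silent.
linear_model.Lasso(alpha=0.01, tol=1e-4, max_iter=10000).fit(X, y)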
Changes to the SVM tests:

@@ -347,13 +347,14 @@ def test_decision_function_shape():
     assert_equal(dec.shape, (len(X_train), 10))

     # check deprecation warning
     clf.decision_function_shape = None
     clf = svm.SVC(kernel='linear', C=0.1).fit(X_train, y_train)
     msg = "change the shape of the decision function"
     dec = assert_warns_message(ChangedBehaviorWarning, msg,
                                clf.decision_function, X_train)
     assert_equal(dec.shape, (len(X_train), 10))


+@ignore_warnings
 def test_svr_decision_function():
     # Test SVR's decision_function
     # Sanity check, test that decision_function implemented in python

Review comment (on the added @ignore_warnings): It shows me one more.
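For context, assert_warns_message in these tests checks that a call emits a warning of a given class whose message contains a given fragment. The sketch below reimplements that idea with the standard library only; it is not the actual helper from sklearn.utils.testing, and legacy_behaviour is a made-up stand-in for the clf.decision_function(X_train) call.

import warnings

def assert_warns_message(warning_class, message, func, *args, **kwargs):
    # Simplified stand-in: call func, capture warnings, and check that one of
    # them has the expected class and contains the expected message fragment.
    with warnings.catch_warnings(record=True) as caught:
        warnings.simplefilter("always")
        result = func(*args, **kwargs)
    matches = [w for w in caught
               if issubclass(w.category, warning_class)
               and message in str(w.message)]
    assert matches, "expected a %s containing %r" % (warning_class.__name__, message)
    return result

def legacy_behaviour():
    # Hypothetical function standing in for the real estimator call.
    warnings.warn("this will change the shape of the decision function",
                  FutureWarning)
    return "old-shape result"

print(assert_warns_message(FutureWarning, "change the shape", legacy_behaviour))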
Changes to the common estimator tests:

@@ -21,6 +21,7 @@

 import sklearn
 from sklearn.cluster.bicluster import BiclusterMixin
+from sklearn.decomposition import ProjectedGradientNMF

 from sklearn.linear_model.base import LinearClassifierMixin
 from sklearn.utils.estimator_checks import (

@@ -66,8 +67,12 @@ def test_non_meta_estimators():
         if name.startswith("_"):
             continue
         for check in _yield_all_checks(name, Estimator):
-            yield check, name, Estimator
+            if issubclass(Estimator, ProjectedGradientNMF):
+                # The ProjectedGradientNMF class is deprecated
+                with ignore_warnings():
+                    yield check, name, Estimator
+            else:
+                yield check, name, Estimator


 def test_configure():
     # Smoke test the 'configure' step of setup, this tests all the

Review comment (on the issubclass check): You can add a comment above on why this is done, in order to avoid confusion here in the future.

@@ -180,14 +185,24 @@ def test_non_transformer_estimators_n_iter():
 def test_transformer_n_iter():
     transformers = all_estimators(type_filter='transformer')
     for name, Estimator in transformers:
-        estimator = Estimator()
+        if issubclass(Estimator, ProjectedGradientNMF):
+            # The ProjectedGradientNMF class is deprecated
+            with ignore_warnings():
+                estimator = Estimator()
+        else:
+            estimator = Estimator()
         # Dependent on external solvers and hence accessing the iter
         # param is non-trivial.
         external_solver = ['Isomap', 'KernelPCA', 'LocallyLinearEmbedding',
                            'RandomizedLasso', 'LogisticRegressionCV']

         if hasattr(estimator, "max_iter") and name not in external_solver:
-            yield check_transformer_n_iter, name, estimator
+            if isinstance(estimator, ProjectedGradientNMF):
+                # The ProjectedGradientNMF class is deprecated
+                with ignore_warnings():
+                    yield check_transformer_n_iter, name, estimator
+            else:
+                yield check_transformer_n_iter, name, estimator


 def test_get_params_invariance():

@@ -198,4 +213,9 @@ def test_get_params_invariance():
     estimators = all_estimators(include_meta_estimators=False, include_other=True)
     for name, Estimator in estimators:
         if hasattr(Estimator, 'get_params'):
-            yield check_get_params_invariance, name, Estimator
+            # The ProjectedGradientNMF class is deprecated
+            if issubclass(Estimator, ProjectedGradientNMF):
+                with ignore_warnings():
+                    yield check_get_params_invariance, name, Estimator
+            else:
+                yield check_get_params_invariance, name, Estimator
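These changes lean on sklearn's ignore_warnings helper, which can be used both as a bare decorator (as on test_multitarget and test_svr_decision_function above) and as a context manager (as around the yields here). The toy stand-in below only illustrates that dual-use pattern; it is not the real implementation from sklearn.utils.testing.

import warnings
from functools import wraps

class _SuppressWarnings(object):
    # Context-manager half of the toy helper: silence all warnings inside a
    # `with` block and restore the previous filters afterwards.
    def __enter__(self):
        self._ctx = warnings.catch_warnings()
        self._ctx.__enter__()
        warnings.simplefilter("ignore")

    def __exit__(self, *exc_info):
        return self._ctx.__exit__(*exc_info)

def ignore_warnings(obj=None):
    # Bare decorator form: @ignore_warnings wraps the function so that every
    # call runs with warnings silenced.
    if callable(obj):
        @wraps(obj)
        def wrapper(*args, **kwargs):
            with _SuppressWarnings():
                return obj(*args, **kwargs)
        return wrapper
    # Context-manager form: with ignore_warnings(): ...
    return _SuppressWarnings()

@ignore_warnings
def noisy_test():
    warnings.warn("this estimator is deprecated", UserWarning)
    return "ok"

print(noisy_test())              # runs without printing the warning

with ignore_warnings():
    warnings.warn("this estimator is deprecated", UserWarning)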
Review comment: Just a note for the future: there seems to be a RuntimeWarning due to division by zero.

Reply: You mean due to the changes I have made?

Reply: Nope, not due to your changes. Some independent warning.
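As an aside on that last exchange, a division-by-zero RuntimeWarning of this kind usually comes from NumPy. Here is a minimal sketch, unrelated to any particular scikit-learn code path, of how such a warning can be turned into an error to locate its source, or silenced locally once it is known to be harmless.

import numpy as np

a = np.array([1.0, 2.0])
b = np.array([0.0, 2.0])

# By default NumPy only emits "RuntimeWarning: divide by zero encountered".
print(a / b)                       # [inf  1.]

# Turning the warning into an error makes the offending line easy to find.
try:
    with np.errstate(divide="raise"):
        a / b
except FloatingPointError as exc:
    print("caught:", exc)

# Or silence it locally once the behaviour is understood to be intended.
with np.errstate(divide="ignore"):
    print(a / b)                   # [inf  1.]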