diff --git a/sklearn/ensemble/tests/test_stacking.py b/sklearn/ensemble/tests/test_stacking.py
index 88dd76cb0b49d..d6b4c385b9073 100644
--- a/sklearn/ensemble/tests/test_stacking.py
+++ b/sklearn/ensemble/tests/test_stacking.py
@@ -27,8 +27,6 @@
 from sklearn.svm import LinearSVC
 from sklearn.svm import LinearSVR
 from sklearn.svm import SVC
-from sklearn.tree import DecisionTreeClassifier
-from sklearn.tree import DecisionTreeRegressor
 from sklearn.ensemble import RandomForestClassifier
 from sklearn.ensemble import RandomForestRegressor
 from sklearn.preprocessing import scale
@@ -44,8 +42,6 @@
 from sklearn.utils._testing import assert_allclose
 from sklearn.utils._testing import assert_allclose_dense_sparse
 from sklearn.utils._testing import ignore_warnings
-from sklearn.utils.estimator_checks import check_estimator
-from sklearn.utils.estimator_checks import check_no_attributes_set_in_init
 
 X_diabetes, y_diabetes = load_diabetes(return_X_y=True)
 X_iris, y_iris = load_iris(return_X_y=True)
@@ -368,24 +364,6 @@ def test_stacking_randomness(estimator, X, y):
     )
 
 
-# These warnings are raised due to _BaseComposition
-@pytest.mark.filterwarnings("ignore:TypeError occurred during set_params")
-@pytest.mark.filterwarnings("ignore:Estimator's parameters changed after")
-@pytest.mark.parametrize(
-    "estimator",
-    [StackingClassifier(
-        estimators=[('lr', LogisticRegression(random_state=0)),
-                    ('tree', DecisionTreeClassifier(random_state=0))]),
-     StackingRegressor(
-         estimators=[('lr', LinearRegression()),
-                     ('tree', DecisionTreeRegressor(random_state=0))])],
-    ids=['StackingClassifier', 'StackingRegressor']
-)
-def test_check_estimators_stacking_estimator(estimator):
-    check_estimator(estimator)
-    check_no_attributes_set_in_init(estimator.__class__.__name__, estimator)
-
-
 def test_stacking_classifier_stratify_default():
     # check that we stratify the classes for the default CV
     clf = StackingClassifier(
diff --git a/sklearn/ensemble/tests/test_voting.py b/sklearn/ensemble/tests/test_voting.py
index 7e8852f2d0f19..7fa1fce0f755d 100644
--- a/sklearn/ensemble/tests/test_voting.py
+++ b/sklearn/ensemble/tests/test_voting.py
@@ -7,8 +7,6 @@
 from sklearn.utils._testing import assert_almost_equal, assert_array_equal
 from sklearn.utils._testing import assert_array_almost_equal
 from sklearn.utils._testing import assert_raise_message
-from sklearn.utils.estimator_checks import check_estimator
-from sklearn.utils.estimator_checks import check_no_attributes_set_in_init
 from sklearn.exceptions import NotFittedError
 from sklearn.linear_model import LinearRegression
 from sklearn.linear_model import LogisticRegression
@@ -490,23 +488,6 @@ def test_none_estimator_with_weights(X, y, voter):
     assert y_pred.shape == y.shape
 
 
-@pytest.mark.parametrize(
-    "estimator",
-    [VotingRegressor(
-        estimators=[('lr', LinearRegression()),
-                    ('tree', DecisionTreeRegressor(random_state=0))]),
-     VotingClassifier(
-         estimators=[('lr', LogisticRegression(random_state=0)),
-                     ('tree', DecisionTreeClassifier(random_state=0))])],
-    ids=['VotingRegressor', 'VotingClassifier']
-)
-def test_check_estimators_voting_estimator(estimator):
-    # FIXME: to be removed when meta-estimators can specified themselves
-    # their testing parameters (for required parameters).
-    check_estimator(estimator)
-    check_no_attributes_set_in_init(estimator.__class__.__name__, estimator)
-
-
 @pytest.mark.parametrize(
     "est",
     [VotingRegressor(
diff --git a/sklearn/utils/estimator_checks.py b/sklearn/utils/estimator_checks.py
index 5b99e8e56c420..40a92fe63ab00 100644
--- a/sklearn/utils/estimator_checks.py
+++ b/sklearn/utils/estimator_checks.py
@@ -26,7 +26,9 @@
 from ._testing import create_memmap_backed_data
 from ._testing import raises
 from . import is_scalar_nan
+
 from ..discriminant_analysis import LinearDiscriminantAnalysis
+from ..linear_model import LogisticRegression
 from ..linear_model import Ridge
 
 from ..base import (
@@ -343,10 +345,24 @@ def _construct_instance(Estimator):
                 estimator = Estimator(Ridge())
             else:
                 estimator = Estimator(LinearDiscriminantAnalysis())
+        elif required_parameters in (['estimators'],):
+            # Heterogeneous ensemble classes (i.e. stacking, voting)
+            if issubclass(Estimator, RegressorMixin):
+                estimator = Estimator(estimators=[
+                    ("est1", Ridge(alpha=0.1)),
+                    ("est2", Ridge(alpha=1))
+                ])
+            else:
+                estimator = Estimator(estimators=[
+                    ("est1", LogisticRegression(C=0.1)),
+                    ("est2", LogisticRegression(C=1))
+                ])
         else:
-            raise SkipTest("Can't instantiate estimator {} which requires "
-                           "parameters {}".format(Estimator.__name__,
-                                                  required_parameters))
+            msg = (f"Can't instantiate estimator {Estimator.__name__} "
+                   f"which requires parameters {required_parameters}")
+            # raise additional warning to be shown by pytest
+            warnings.warn(msg, SkipTestWarning)
+            raise SkipTest(msg)
     else:
         estimator = Estimator()
     return estimator
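
Note on the new `_construct_instance` branch (an illustrative sketch, not part of the patch): once the common checks can build a default instance for classes whose only required parameter is `estimators`, the deleted per-module tests become redundant, since the same classes are presumably exercised by the common test suite. A rough manual equivalent, assuming `check_estimator` is called on an instance; the "est1"/"est2" names simply mirror the patch:

    from sklearn.ensemble import StackingClassifier
    from sklearn.linear_model import LogisticRegression
    from sklearn.utils.estimator_checks import check_estimator

    # Build the same kind of default instance the patched helper produces
    # (two LogisticRegression base estimators) and run the common checks on it.
    check_estimator(StackingClassifier(estimators=[
        ("est1", LogisticRegression(C=0.1)),
        ("est2", LogisticRegression(C=1)),
    ]))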