|
9 | 9 | from sklearn.utils.testing import assert_equal |
10 | 10 | from sklearn.utils.testing import assert_almost_equal |
11 | 11 | from sklearn.utils.testing import assert_raises |
| 12 | +from sklearn.utils.testing import assert_raise_message |
12 | 13 | from sklearn.utils.testing import assert_true |
13 | 14 | from sklearn.utils.testing import assert_array_equal |
14 | 15 | from sklearn.utils.testing import assert_array_almost_equal |
@@ -251,10 +252,12 @@ def test_select_kbest_zero(): |
251 | 252 | shuffle=False, random_state=0) |
252 | 253 |
|
253 | 254 | univariate_filter = SelectKBest(f_classif, k=0) |
254 | | - univariate_filter.fit(X, y).transform(X) |
| 255 | + univariate_filter.fit(X, y) |
255 | 256 | support = univariate_filter.get_support() |
256 | 257 | gtruth = np.zeros(10, dtype=bool) |
257 | 258 | assert_array_equal(support, gtruth) |
| 259 | + assert_raise_message(ValueError, 'No features were selected', |
| 260 | + univariate_filter.transform, X) |
258 | 261 |
|
259 | 262 |
|
260 | 263 | def test_select_fpr_classif(): |
@@ -585,3 +588,17 @@ def test_f_classif_constant_feature(): |
585 | 588 | X, y = make_classification(n_samples=10, n_features=5) |
586 | 589 | X[:, 0] = 2.0 |
587 | 590 | assert_warns(UserWarning, f_classif, X, y) |
| 591 | + |
| 592 | + |
def test_no_feature_selected():
    """A maximally strict FDR filter on pure-noise data keeps no features,
    and calling transform() afterwards raises an informative ValueError."""
    rng = np.random.RandomState(0)

    # Features and labels are drawn independently, so nothing is
    # informative: with a near-zero alpha the FDR test should reject
    # every feature.
    X = rng.rand(40, 10)
    y = rng.randint(0, 4, size=40)
    strict_filter = SelectFdr(alpha=0.00001).fit(X, y)
    assert_array_equal(strict_filter.get_support(), np.zeros(10))

    assert_raise_message(ValueError, 'No features were selected',
                         strict_filter.transform, X)
0 commit comments