|
20 | 20 | from sklearn.gaussian_process.tests._mini_sequence_kernel import MiniSeqKernel
|
21 | 21 | from sklearn.exceptions import ConvergenceWarning
|
22 | 22 |
|
23 |
| -from sklearn.utils._testing \ |
24 |
| - import (assert_array_less, |
25 |
| - assert_almost_equal, assert_array_almost_equal, |
26 |
| - assert_array_equal, assert_allclose) |
| 23 | +from sklearn.utils._testing import ( |
| 24 | + assert_array_less, |
| 25 | + assert_almost_equal, |
| 26 | + assert_array_almost_equal, |
| 27 | + assert_allclose |
| 28 | +) |
27 | 29 |
|
28 | 30 |
|
29 | 31 | def f(x):
|
@@ -185,7 +187,8 @@ def test_no_optimizer():
|
185 | 187 |
|
186 | 188 |
|
187 | 189 | @pytest.mark.parametrize('kernel', kernels)
|
188 |
| -def test_predict_cov_vs_std(kernel): |
| 190 | +@pytest.mark.parametrize("target", [y, np.ones(X.shape[0], dtype=np.float64)]) |
| 191 | +def test_predict_cov_vs_std(kernel, target): |
189 | 192 | if sys.maxsize <= 2 ** 32 and sys.version_info[:2] == (3, 6):
|
190 | 193 | pytest.xfail("This test may fail on 32bit Py3.6")
|
191 | 194 |
|
@@ -455,25 +458,6 @@ def test_no_fit_default_predict():
|
455 | 458 | assert_array_almost_equal(y_cov1, y_cov2)
|
456 | 459 |
|
457 | 460 |
|
458 |
| -@pytest.mark.parametrize('kernel', kernels) |
459 |
| -def test_K_inv_reset(kernel): |
460 |
| - y2 = f(X2).ravel() |
461 |
| - |
462 |
| - # Test that self._K_inv is reset after a new fit |
463 |
| - gpr = GaussianProcessRegressor(kernel=kernel).fit(X, y) |
464 |
| - assert hasattr(gpr, '_K_inv') |
465 |
| - assert gpr._K_inv is None |
466 |
| - gpr.predict(X, return_std=True) |
467 |
| - assert gpr._K_inv is not None |
468 |
| - gpr.fit(X2, y2) |
469 |
| - assert gpr._K_inv is None |
470 |
| - gpr.predict(X2, return_std=True) |
471 |
| - gpr2 = GaussianProcessRegressor(kernel=kernel).fit(X2, y2) |
472 |
| - gpr2.predict(X2, return_std=True) |
473 |
| - # the value of K_inv should be independent of the first fit |
474 |
| - assert_array_equal(gpr._K_inv, gpr2._K_inv) |
475 |
| - |
476 |
| - |
477 | 461 | def test_warning_bounds():
|
478 | 462 | kernel = RBF(length_scale_bounds=[1e-5, 1e-3])
|
479 | 463 | gpr = GaussianProcessRegressor(kernel=kernel)
|
@@ -569,3 +553,28 @@ def test_constant_target(kernel):
|
569 | 553 | assert_allclose(y_pred, y_constant)
|
570 | 554 | # set atol because we compare to zero
|
571 | 555 | assert_allclose(np.diag(y_cov), 0., atol=1e-9)
|
| 556 | + |
| 557 | + |
| 558 | +def test_gpr_consistency_std_cov_non_invertible_kernel(): |
| 559 | + """Check the consistency between the returned std. dev. and the covariance. |
| 560 | + Non-regression test for: |
| 561 | + https://github.com/scikit-learn/scikit-learn/issues/19936 |
| 562 | + Inconsistencies were observed when the kernel cannot be inverted (or |
| 563 | + numerically stable). |
| 564 | + """ |
| 565 | + kernel = (C(8.98576054e+05, (1e-12, 1e12)) * |
| 566 | + RBF([5.91326520e+02, 1.32584051e+03], (1e-12, 1e12)) + |
| 567 | + WhiteKernel(noise_level=1e-5)) |
| 568 | + gpr = GaussianProcessRegressor(kernel=kernel, alpha=0, optimizer=None) |
| 569 | +    X_train = np.array([[0., 0.], [1.54919334, -0.77459667], [-1.54919334, 0.],
| 570 | + [0., -1.54919334], [0.77459667, 0.77459667], |
| 571 | + [-0.77459667, 1.54919334]]) |
| 572 | + y_train = np.array([[-2.14882017e-10], [-4.66975823e+00], [4.01823986e+00], |
| 573 | + [-1.30303674e+00], [-1.35760156e+00], |
| 574 | + [3.31215668e+00]]) |
| 575 | + gpr.fit(X_train, y_train) |
| 576 | + X_test = np.array([[-1.93649167, -1.93649167], [1.93649167, -1.93649167], |
| 577 | + [-1.93649167, 1.93649167], [1.93649167, 1.93649167]]) |
| 578 | + pred1, std = gpr.predict(X_test, return_std=True) |
| 579 | + pred2, cov = gpr.predict(X_test, return_cov=True) |
| 580 | + assert_allclose(std, np.sqrt(np.diagonal(cov)), rtol=1e-5) |
0 commit comments