Return string_param when white noise is present in sum · scikit-optimize/scikit-optimize@c6ffe6c

This repository was archived by the owner on Feb 28, 2024. It is now read-only.

Commit c6ffe6c

Return string_param when white noise is present in sum
1 parent d73f058 commit c6ffe6c

File tree

skopt/learning/gpr.py
skopt/learning/tests/test_gpr.py
skopt/optimizer/gp.py

3 files changed: +87 −31 lines changed


skopt/learning/gpr.py

Lines changed: 51 additions & 28 deletions

@@ -3,20 +3,40 @@
 from sklearn.gaussian_process.kernels import Sum
 
 
-def _check_WhiteKernel_in_Sum(kernel):
+def _param_for_white_kernel_in_Sum(kernel, kernel_str=""):
+    """
+    Check if a WhiteKernel exists in a Sum Kernel
+    and if it does return the corresponding key in
+    `kernel.get_params()`
+    """
+    if kernel_str != "":
+        kernel_str = kernel_str + "__"
     if isinstance(kernel, Sum):
-        return (
-            _check_WhiteKernel_in_Sum(kernel.k1) or
-            _check_WhiteKernel_in_Sum(kernel.k2)
-        )
-    else:
-        return isinstance(kernel, WhiteKernel)
+        for param, child in kernel.get_params(deep=False).items():
+            if isinstance(child, WhiteKernel):
+                return True, kernel_str + param
+            else:
+                present, child_str = _param_for_white_kernel_in_Sum(
+                    child, kernel_str + param)
+                if present:
+                    return True, child_str
+    return False, "_"
 
 
 class GaussianProcessRegressor(sk_GaussianProcessRegressor):
     """
     GaussianProcessRegressor that allows noise tunability.
     """
+    def __init__(self, kernel=None, alpha=1e-10,
+                 optimizer="fmin_l_bfgs_b", n_restarts_optimizer=0,
+                 normalize_y=False, copy_X_train=True, random_state=None,
+                 noise=None):
+        self.noise = noise
+        super(GaussianProcessRegressor, self).__init__(
+            kernel=kernel, alpha=alpha, optimizer=optimizer,
+            n_restarts_optimizer=n_restarts_optimizer,
+            normalize_y=normalize_y, copy_X_train=copy_X_train,
+            random_state=random_state)
 
     def fit(self, X, y):
         """Fit Gaussian process regression model

@@ -33,24 +53,27 @@ def fit(self, X, y):
         -------
         self : returns an instance of self.
         """
-        super(GaussianProcessRegressor, self).fit(X, y)
-        for param, value in self.kernel_.get_params().items():
-            if param.endswith('noise_level'):
-                self.noise_ = value
-                break
-
-        # The noise component of this kernel should be set to zero
-        # while estimating K(X, X_test) and K(X_test, X_test)
-        # Note that the term K(X, X) should include the noise but
-        # this (K(X, X))^-1y is precomputed as the attribute `alpha_`.
-        # (Notice the underscore).
-        # This has been described in Eq 2.24 of
-        # http://www.gaussianprocess.org/gpml/chapters/RW2.pdf
-        if isinstance(self.kernel_, WhiteKernel):
-            self.kernel_.set_params(noise_level=0.0)
-        elif _check_WhiteKernel_in_Sum(self.kernel_):
-            for param, value in self.kernel_.get_params().items():
-                if isinstance(value, WhiteKernel):
-                    self.kernel_.set_params(
-                        **{param: WhiteKernel(noise_level=0.0)})
-        return self
+        if noise is None:
+            # Nothing special
+            return super(GaussianProcessRegressor, self).fit(X, y)
+        # else:
+
+        # The noise component of this kernel should be set to zero
+        # while estimating K(X, X_test) and K(X_test, X_test)
+        # Note that the term K(X, X) should include the noise but
+        # this (K(X, X))^-1y is precomputed as the attribute `alpha_`.
+        # (Notice the underscore).
+        # This has been described in Eq 2.24 of
+        # http://www.gaussianprocess.org/gpml/chapters/RW2.pdf
+        # Hence this hack
+        # self._gp = sk_GaussianProcessRegressor()
+        # params = self.get_params().copy()
+        # params.pop['noise']
+        # self._gp.set_params(**params)
+        # self._gp.set_params(kernel=self.kernel + WhiteKernel())
+        # self._gp.fit(X, y)
+        # white_present, white_param = param_for_white_kernel_in_Sum(
+        #     self._gp.kernel_)
+        # if white_present:
+        #     self._gp.kernel_.set_params(
+        #         **{white_param: WhiteKernel(noise_level=0.0)})
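
To make the returned key concrete: it is a "__"-separated path into `kernel.get_params()`, so it can be handed straight to `set_params` to swap the white-noise component for a silent one, which is what the prediction-time comments above are after. A minimal sketch, assuming a hand-built kernel chosen only for illustration (only `_param_for_white_kernel_in_Sum` comes from this commit):

    import numpy as np
    from sklearn.gaussian_process.kernels import Matern, WhiteKernel

    from skopt.learning.gpr import _param_for_white_kernel_in_Sum

    # A Sum kernel: amplitude * Matern plus an additive white-noise term.
    kernel = 1.0 * Matern(nu=2.5) + WhiteKernel(noise_level=0.5)

    present, key = _param_for_white_kernel_in_Sum(kernel)
    # present is True and key is "k2" here, because the WhiteKernel is the
    # right child of the top-level Sum.

    # The key plugs directly into set_params, silencing the noise term
    # without touching the rest of the kernel.
    kernel.set_params(**{key: WhiteKernel(noise_level=0.0)})

    X = np.random.RandomState(0).randn(5, 2)
    np.testing.assert_array_equal(kernel(X), (1.0 * Matern(nu=2.5))(X))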

skopt/learning/tests/test_gpr.py

Lines changed: 34 additions & 0 deletions

@@ -0,0 +1,34 @@
+import numpy as np
+
+from sklearn.gaussian_process.kernels import RBF
+from sklearn.gaussian_process.kernels import Matern
+from sklearn.gaussian_process.kernels import WhiteKernel
+from sklearn.utils.testing import assert_false
+from sklearn.utils.testing import assert_true
+from sklearn.utils.testing import assert_array_equal
+
+from skopt.learning.gpr import _param_for_white_kernel_in_Sum
+
+rng = np.random.RandomState(0)
+X = rng.randn(5, 5)
+
+rbf = RBF()
+wk = WhiteKernel()
+mat = Matern()
+kernel1 = rbf
+kernel2 = mat + rbf
+kernel3 = mat * rbf
+kernel4 = wk * rbf
+kernel5 = mat + rbf * wk
+
+
+def test_param_for_white_kernel_in_Sum():
+    for kernel in [kernel1, kernel2, kernel3, kernel4]:
+        kernel_with_noise = kernel + wk
+        wk_present, wk_param = _param_for_white_kernel_in_Sum(kernel + wk)
+        assert_true(wk_present)
+        kernel_with_noise.set_params(
+            **{wk_param: WhiteKernel(noise_level=0.0)})
+        assert_array_equal(kernel_with_noise(X), kernel(X))
+
+    assert_false(_param_for_white_kernel_in_Sum(kernel5)[0])
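
Note that `kernel5 = mat + rbf * wk` is deliberately the negative case: the helper only recurses into `Sum` nodes, so a `WhiteKernel` sitting inside a `Product` is not reported as additive observation noise. A small sketch of what the traversal sees (output in the comments is indicative only):

    from sklearn.gaussian_process.kernels import RBF, Matern, WhiteKernel

    from skopt.learning.gpr import _param_for_white_kernel_in_Sum

    kernel5 = Matern() + RBF() * WhiteKernel()

    # Top level: Sum(k1=Matern, k2=RBF * WhiteKernel). Neither child is a
    # WhiteKernel, and the Product child is not a Sum, so no key is found.
    print(kernel5.get_params(deep=False))           # {'k1': ..., 'k2': ...}
    print(_param_for_white_kernel_in_Sum(kernel5))  # (False, '_')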

skopt/optimizer/gp.py

Lines changed: 2 additions & 3 deletions

@@ -176,10 +176,9 @@ def gp_minimize(func, dimensions, base_estimator=None,
     matern = Matern(length_scale=np.ones(space.transformed_n_dims),
                     length_scale_bounds=[(0.01, 100)] * space.transformed_n_dims,
                     nu=2.5)
-    noise = WhiteKernel()
     base_estimator = GaussianProcessRegressor(
-        kernel=cov_amplitude * matern + noise,
-        normalize_y=True, random_state=random_state, alpha=0.0)
+        kernel=cov_amplitude * matern,
+        normalize_y=True, random_state=random_state, alpha=0.0, noise="gaussian")
 
     return base_minimize(
         func, dimensions, base_estimator=base_estimator,
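
For callers, the removed lines show the old spelling, in which the white-noise term was built into the kernel by hand; the new `noise` argument asks the skopt wrapper to own that term instead. A rough before/after sketch (the kernel dimensions are illustrative, and the noise="gaussian" handling in `fit` is still commented out in the diff above):

    import numpy as np
    from sklearn.gaussian_process.kernels import ConstantKernel, Matern, WhiteKernel

    from skopt.learning.gpr import GaussianProcessRegressor

    cov_amplitude = ConstantKernel(1.0)
    matern = Matern(length_scale=np.ones(2), nu=2.5)

    # Before this commit: noise modelled by an explicit WhiteKernel in the sum.
    gpr_old = GaussianProcessRegressor(
        kernel=cov_amplitude * matern + WhiteKernel(),
        normalize_y=True, alpha=0.0)

    # After this commit: the noise term is dropped from the kernel and the
    # wrapper is told to model Gaussian noise via the new constructor argument.
    gpr_new = GaussianProcessRegressor(
        kernel=cov_amplitude * matern,
        normalize_y=True, alpha=0.0, noise="gaussian")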

0 commit comments
