 from sklearn.gaussian_process.kernels import Sum


-def _check_WhiteKernel_in_Sum(kernel):
+def _param_for_white_kernel_in_Sum(kernel, kernel_str=""):
+    """
+    Check whether a WhiteKernel exists in a Sum kernel
+    and, if it does, return the corresponding key in
+    `kernel.get_params()`.
+    """
+    if kernel_str != "":
+        kernel_str = kernel_str + "__"
+
     if isinstance(kernel, Sum):
-        return (
-            _check_WhiteKernel_in_Sum(kernel.k1) or
-            _check_WhiteKernel_in_Sum(kernel.k2)
-        )
-    else:
-        return isinstance(kernel, WhiteKernel)
+        for param, child in kernel.get_params(deep=False).items():
+            if isinstance(child, WhiteKernel):
+                return True, kernel_str + param
+            else:
+                present, child_str = _param_for_white_kernel_in_Sum(
+                    child, kernel_str + param)
+                if present:
+                    return True, child_str
+    return False, "_"
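
Not part of the commit, but a quick illustration of what the new helper returns on a composite kernel. The `RBF` example kernel and the `'k2'` key are assumptions based on sklearn's usual `k1`/`k2` naming of `Sum` children, not something taken from this diff.

    # Illustrative sketch only (assumes sklearn's 'k1'/'k2' naming of Sum children):
    from sklearn.gaussian_process.kernels import RBF, WhiteKernel

    k = 1.0 * RBF(length_scale=1.0) + WhiteKernel(noise_level=1.0)
    present, key = _param_for_white_kernel_in_Sum(k)
    # present is True and key is 'k2', so the white component can be zeroed with:
    k.set_params(**{key: WhiteKernel(noise_level=0.0)})
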
 class GaussianProcessRegressor(sk_GaussianProcessRegressor):
     """
     GaussianProcessRegressor that allows noise tunability.
     """
+    def __init__(self, kernel=None, alpha=1e-10,
+                 optimizer="fmin_l_bfgs_b", n_restarts_optimizer=0,
+                 normalize_y=False, copy_X_train=True, random_state=None,
+                 noise=None):
+        self.noise = noise
+        super(GaussianProcessRegressor, self).__init__(
+            kernel=kernel, alpha=alpha, optimizer=optimizer,
+            n_restarts_optimizer=n_restarts_optimizer,
+            normalize_y=normalize_y, copy_X_train=copy_X_train,
+            random_state=random_state)
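
As a side note (not in the diff): declaring `noise` as an explicit `__init__` argument is what lets it pass through the standard sklearn parameter machinery. A minimal hedged check, assuming nothing beyond `BaseEstimator` semantics:

    # Sketch only: `noise` shows up in get_params() and survives clone(),
    # because it is an explicit __init__ argument stored unmodified on self.
    from sklearn.base import clone

    gpr = GaussianProcessRegressor(noise=1e-2)
    assert gpr.get_params()["noise"] == 1e-2
    assert clone(gpr).noise == 1e-2
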
     def fit(self, X, y):
         """Fit Gaussian process regression model
@@ -33,24 +53,27 @@ def fit(self, X, y):
         -------
         self : returns an instance of self.
         """
-        super(GaussianProcessRegressor, self).fit(X, y)
-        for param, value in self.kernel_.get_params().items():
-            if param.endswith('noise_level'):
-                self.noise_ = value
-                break
-
-        # The noise component of this kernel should be set to zero
-        # while estimating K(X, X_test) and K(X_test, X_test)
-        # Note that the term K(X, X) should include the noise but
-        # this (K(X, X))^-1y is precomputed as the attribute `alpha_`.
-        # (Notice the underscore).
-        # This has been described in Eq 2.24 of
-        # http://www.gaussianprocess.org/gpml/chapters/RW2.pdf
-        if isinstance(self.kernel_, WhiteKernel):
-            self.kernel_.set_params(noise_level=0.0)
-        elif _check_WhiteKernel_in_Sum(self.kernel_):
-            for param, value in self.kernel_.get_params().items():
-                if isinstance(value, WhiteKernel):
-                    self.kernel_.set_params(
-                        **{param: WhiteKernel(noise_level=0.0)})
-        return self
+        if self.noise is None:
+            # Nothing special to do: defer entirely to the parent class.
+            return super(GaussianProcessRegressor, self).fit(X, y)
+
+        # The noise component of this kernel should be set to zero
+        # while estimating K(X, X_test) and K(X_test, X_test).
+        # Note that the term K(X, X) should include the noise, but
+        # the product (K(X, X))^-1 y is precomputed as the attribute `alpha_`
+        # (notice the underscore).
+        # This is described in Eq. 2.24 of
+        # http://www.gaussianprocess.org/gpml/chapters/RW2.pdf
+        # Hence this hack:
+        # self._gp = sk_GaussianProcessRegressor()
+        # params = self.get_params().copy()
+        # params.pop('noise')
+        # self._gp.set_params(**params)
+        # self._gp.set_params(kernel=self.kernel + WhiteKernel())
+        # self._gp.fit(X, y)
+        # white_present, white_param = _param_for_white_kernel_in_Sum(
+        #     self._gp.kernel_)
+        # if white_present:
+        #     self._gp.kernel_.set_params(
+        #         **{white_param: WhiteKernel(noise_level=0.0)})
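
The block above is still commented out in this commit. A speculative, uncommented rendering of the same hack might look as follows; the inner `_gp` attribute and the trailing `return self` are assumptions about where the work is heading, and the snippet relies on the module-level imports of `sk_GaussianProcessRegressor` and `WhiteKernel` already used elsewhere in this file.

    # Speculative sketch of the noise branch, not part of this commit:
    def fit(self, X, y):
        if self.noise is None:
            return super(GaussianProcessRegressor, self).fit(X, y)

        # Fit an inner sklearn GP whose kernel carries an extra WhiteKernel
        # to absorb the noise.
        self._gp = sk_GaussianProcessRegressor()
        params = self.get_params().copy()
        params.pop('noise')  # the inner estimator has no `noise` argument
        self._gp.set_params(**params)
        self._gp.set_params(kernel=self.kernel + WhiteKernel())
        self._gp.fit(X, y)

        # Zero the fitted white component so K(X, X_test) and K(X_test, X_test)
        # are noise-free; K(X, X)^-1 y was already cached (noise included) in `alpha_`.
        white_present, white_param = _param_for_white_kernel_in_Sum(
            self._gp.kernel_)
        if white_present:
            self._gp.kernel_.set_params(
                **{white_param: WhiteKernel(noise_level=0.0)})
        return self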