Release 0.8.1 · scikit-optimize/scikit-optimize@de32b5f · GitHub

This repository was archived by the owner on Feb 28, 2024. It is now read-only.

Commit de32b5f

Release 0.8.1
Merge changes from #943
1 parent 4e391ec commit de32b5f

File tree

5 files changed: +41 -4 lines changed

doc/templates/index.html

Lines changed: 2 additions & 0 deletions
@@ -109,6 +109,8 @@ <h4 class="sk-landing-call-header">News</h4>
 <li><strong>On-going development:</strong>
 <a href="https://scikit-optimize.github.io/dev/whats_new.html"><strong>What's new</strong> (Changelog)</a>
 </li>
+<li><strong>Sep 2020.</strong> scikit-optimize 0.8.1 (<a href="whats_new/v0.8.html#version-0-8-1">Changelog</a>).
+<li><strong>Sep 2020.</strong> scikit-optimize 0.8 (<a href="whats_new/v0.8.html#version-0-8">Changelog</a>).
 <li><strong>Feb 2020.</strong> scikit-optimize 0.7.2 (<a href="whats_new/v0.7.html#version-0-7-2">Changelog</a>).
 <li><strong>Feb 2020.</strong> scikit-optimize 0.7.1 (<a href="whats_new/v0.7.html#version-0-7-1">Changelog</a>).
 <li><strong>Jan 2020.</strong> scikit-optimize 0.7 (<a href="whats_new/v0.7.html#version-0-7">Changelog</a>).

doc/whats_new/v0.8.rst

Lines changed: 9 additions & 0 deletions
@@ -2,6 +2,15 @@
 
 .. currentmodule:: skopt
 
+.. _changes_0_8_1:
+
+Version 0.8.1
+=============
+**September 2020**
+
+- |Fix| GaussianProcessRegressor on sklearn 0.23 normalizes the
+  variance to 1, which needs to be reverted on predict.
+
 .. _changes_0_8:
 
 Version 0.8.0
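For context on the fix described above: starting with scikit-learn 0.23, GaussianProcessRegressor with normalize_y=True standardises the training targets, dividing by their standard deviation in addition to subtracting the mean, so predictions made on the normalised scale have to be rescaled before they are returned. The snippet below is a minimal illustration of that back-transformation with made-up values; it is not skopt's internal code.

import numpy as np

# Made-up training targets and normalised predictions, purely for illustration.
y_train = np.array([3.0, 5.0, 9.0, 11.0])
y_train_mean = y_train.mean()
y_train_std = y_train.std()

# Suppose the GP, fitted on (y_train - y_train_mean) / y_train_std, predicts:
y_mean_norm = np.array([-0.5, 0.2])   # predictive mean on the normalised scale
y_var_norm = np.array([0.04, 0.09])   # predictive variance on the normalised scale

# Undo the normalisation, as the patched predict() does:
y_mean = y_train_std * y_mean_norm + y_train_mean   # mean: scale, then shift back
y_var = y_var_norm * y_train_std ** 2                # variance: scale by std squared
y_std = np.sqrt(y_var)                               # std: scale by std

The mean and its gradient scale by the training standard deviation, while the variance and covariance scale by its square; that is exactly the pattern of the * self.y_train_std_ and * self.y_train_std_**2 lines added to gpr.py below.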

skopt/__init__.py

Lines changed: 1 addition & 1 deletion
@@ -29,7 +29,7 @@
 # Dev branch marker is: 'X.Y.dev' or 'X.Y.devN' where N is an integer.
 # 'X.Y.dev0' is the canonical version of 'X.Y.dev'
 #
-__version__ = "0.8.0"
+__version__ = "0.8.1"
 
 if __SKOPT_SETUP__:
     import sys
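After upgrading, the installed version can be confirmed from Python; this is a generic check, not part of the commit:

import skopt
print(skopt.__version__)  # prints "0.8.1" once this release is installed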

skopt/learning/gaussian_process/gpr.py

Lines changed: 16 additions & 3 deletions
@@ -224,10 +224,15 @@ def fit(self, X, y):
         self.K_inv_ = L_inv.dot(L_inv.T)
 
         # Fix deprecation warning #462
-        if int(sklearn.__version__[2:4]) >= 19:
+        if int(sklearn.__version__[2:4]) >= 23:
+            self.y_train_std_ = self._y_train_std
             self.y_train_mean_ = self._y_train_mean
+        elif int(sklearn.__version__[2:4]) >= 19:
+            self.y_train_mean_ = self._y_train_mean
+            self.y_train_std_ = 1
         else:
             self.y_train_mean_ = self.y_train_mean
+            self.y_train_std_ = 1
 
         return self
 
@@ -309,11 +314,14 @@ def predict(self, X, return_std=False, return_cov=False,
         else:  # Predict based on GP posterior
             K_trans = self.kernel_(X, self.X_train_)
             y_mean = K_trans.dot(self.alpha_)  # Line 4 (y_mean = f_star)
-            y_mean = self.y_train_mean_ + y_mean  # undo normal.
+            # undo normalisation
+            y_mean = self.y_train_std_ * y_mean + self.y_train_mean_
 
             if return_cov:
                 v = cho_solve((self.L_, True), K_trans.T)  # Line 5
                 y_cov = self.kernel_(X) - K_trans.dot(v)  # Line 6
+                # undo normalisation
+                y_cov = y_cov * self.y_train_std_**2
                 return y_mean, y_cov
 
             elif return_std:
@@ -330,17 +338,22 @@ def predict(self, X, return_std=False, return_cov=False,
                     warnings.warn("Predicted variances smaller than 0. "
                                   "Setting those variances to 0.")
                 y_var[y_var_negative] = 0.0
+                # undo normalisation
+                y_var = y_var * self.y_train_std_**2
                 y_std = np.sqrt(y_var)
 
             if return_mean_grad:
                 grad = self.kernel_.gradient_x(X[0], self.X_train_)
                 grad_mean = np.dot(grad.T, self.alpha_)
-
+                # undo normalisation
+                grad_mean = grad_mean * self.y_train_std_
                 if return_std_grad:
                     grad_std = np.zeros(X.shape[1])
                     if not np.allclose(y_std, grad_std):
                         grad_std = -np.dot(K_trans,
                                            np.dot(K_inv, grad))[0] / y_std
+                        # undo normalisation
+                        grad_std = grad_std * self.y_train_std_**2
                     return y_mean, y_std, grad_mean, grad_std
 
             if return_std:
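A quick way to see what this patch achieves is to compare skopt's GaussianProcessRegressor against scikit-learn's own implementation on targets that are far from zero mean and unit variance; before the fix, skopt's predict() did not rescale by the target standard deviation and its output disagreed with scikit-learn's. The check below is a sketch one might run, assuming scikit-learn 0.23.x and this release of scikit-optimize are installed; it is not part of the commit.

import numpy as np
from sklearn.gaussian_process import GaussianProcessRegressor as SklearnGPR
from sklearn.gaussian_process.kernels import Matern
from skopt.learning import GaussianProcessRegressor as SkoptGPR

rng = np.random.RandomState(0)
X = rng.uniform(-2.0, 2.0, size=(30, 1))
y = 10.0 * np.sin(X).ravel() + 50.0   # targets far from zero mean / unit variance

kernel = Matern(length_scale=1.0, nu=2.5)
sk = SklearnGPR(kernel=kernel, normalize_y=True, random_state=0).fit(X, y)
sko = SkoptGPR(kernel=kernel, normalize_y=True, random_state=0).fit(X, y)

X_test = np.linspace(-2.0, 2.0, 5).reshape(-1, 1)
mean_sk, std_sk = sk.predict(X_test, return_std=True)
mean_sko, std_sko = sko.predict(X_test, return_std=True)

# With the fix, both means and standard deviations should agree closely.
print(np.allclose(mean_sk, mean_sko), np.allclose(std_sk, std_sko))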

skopt/tests/test_acquisition.py

Lines changed: 13 additions & 0 deletions
@@ -119,6 +119,19 @@ def test_acquisition_gradient():
         check_gradient_correctness(X_new, gpr, acq_func, np.max(y))
 
 
+@pytest.mark.fast_test
+def test_acquisition_gradient_cookbook():
+    rng = np.random.RandomState(0)
+    X = rng.randn(20, 5)
+    y = rng.randn(20)
+    X_new = rng.randn(5)
+    gpr = cook_estimator("GP", Space(((-5.0, 5.0),)), random_state=0)
+    gpr.fit(X, y)
+
+    for acq_func in ["LCB", "PI", "EI"]:
+        check_gradient_correctness(X_new, gpr, acq_func, np.max(y))
+
+
 @pytest.mark.fast_test
 @pytest.mark.parametrize("acq_func", ["EIps", "PIps"])
 def test_acquisition_per_second(acq_func):
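The new test_acquisition_gradient_cookbook above builds a GP via cook_estimator and delegates the verification to the suite's check_gradient_correctness helper, which, as its name suggests, compares analytic acquisition-function gradients against a numerical approximation. As a rough sketch of that idea (a hypothetical helper, not skopt's actual implementation), a finite-difference gradient check can look like this:

import numpy as np
from scipy.optimize import approx_fprime

def finite_difference_gradient_check(func, grad, x, eps=1e-6, tol=1e-3):
    # Compare an analytic gradient against a forward finite-difference
    # approximation; hypothetical helper, for illustration only.
    numeric = approx_fprime(x, func, eps)
    analytic = grad(x)
    return np.allclose(numeric, analytic, atol=tol)

# Example with a quadratic, whose gradient is known exactly:
def f(x): return float(np.sum(x ** 2))
def g(x): return 2 * x

print(finite_difference_gradient_check(f, g, np.array([0.3, -1.2, 0.7])))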

0 commit comments