Skip to content
This repository has been archived by the owner on Feb 28, 2024. It is now read-only.

Commit

Permalink
Release 0.8.1
Browse files Browse the repository at this point in the history
Merge changes from #943
  • Loading branch information
holgern committed Sep 4, 2020
1 parent 4e391ec commit de32b5f
Show file tree
Hide file tree
Showing 5 changed files with 41 additions and 4 deletions.
2 changes: 2 additions & 0 deletions doc/templates/index.html
Original file line number Diff line number Diff line change
Expand Up @@ -109,6 +109,8 @@ <h4 class="sk-landing-call-header">News</h4>
<li><strong>On-going development:</strong>
<a href="https://scikit-optimize.github.io/dev/whats_new.html"><strong>What's new</strong> (Changelog)</a>
</li>
<li><strong>Sep 2020.</strong> scikit-optimize 0.8.1 (<a href="whats_new/v0.8.html#version-0-8-1">Changelog</a>).
<li><strong>Sep 2020.</strong> scikit-optimize 0.8 (<a href="whats_new/v0.8.html#version-0-8">Changelog</a>).
<li><strong>Feb 2020.</strong> scikit-optimize 0.7.2 (<a href="whats_new/v0.7.html#version-0-7-2">Changelog</a>).
<li><strong>Feb 2020.</strong> scikit-optimize 0.7.1 (<a href="whats_new/v0.7.html#version-0-7-1">Changelog</a>).
<li><strong>Jan 2020.</strong> scikit-optimize 0.7 (<a href="whats_new/v0.7.html#version-0-7">Changelog</a>).
Expand Down
9 changes: 9 additions & 0 deletions doc/whats_new/v0.8.rst
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,15 @@

.. currentmodule:: skopt

.. _changes_0_8_1:

Version 0.8.1
=============
**September 2020**

- |Fix| GaussianProcessRegressor on sklearn 0.23 normalizes the
  variance to 1, which needs to be reverted on predict.

.. _changes_0_8:

Version 0.8.0
Expand Down
2 changes: 1 addition & 1 deletion skopt/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -29,7 +29,7 @@
# Dev branch marker is: 'X.Y.dev' or 'X.Y.devN' where N is an integer.
# 'X.Y.dev0' is the canonical version of 'X.Y.dev'
#
__version__ = "0.8.0"
__version__ = "0.8.1"

if __SKOPT_SETUP__:
import sys
Expand Down
19 changes: 16 additions & 3 deletions skopt/learning/gaussian_process/gpr.py
Original file line number Diff line number Diff line change
Expand Up @@ -224,10 +224,15 @@ def fit(self, X, y):
self.K_inv_ = L_inv.dot(L_inv.T)

# Fix deprecation warning #462
if int(sklearn.__version__[2:4]) >= 19:
if int(sklearn.__version__[2:4]) >= 23:
self.y_train_std_ = self._y_train_std
self.y_train_mean_ = self._y_train_mean
elif int(sklearn.__version__[2:4]) >= 19:
self.y_train_mean_ = self._y_train_mean
self.y_train_std_ = 1
else:
self.y_train_mean_ = self.y_train_mean
self.y_train_std_ = 1

return self

Expand Down Expand Up @@ -309,11 +314,14 @@ def predict(self, X, return_std=False, return_cov=False,
else: # Predict based on GP posterior
K_trans = self.kernel_(X, self.X_train_)
y_mean = K_trans.dot(self.alpha_) # Line 4 (y_mean = f_star)
y_mean = self.y_train_mean_ + y_mean # undo normal.
# undo normalisation
y_mean = self.y_train_std_ * y_mean + self.y_train_mean_

if return_cov:
v = cho_solve((self.L_, True), K_trans.T) # Line 5
y_cov = self.kernel_(X) - K_trans.dot(v) # Line 6
# undo normalisation
y_cov = y_cov * self.y_train_std_**2
return y_mean, y_cov

elif return_std:
Expand All @@ -330,17 +338,22 @@ def predict(self, X, return_std=False, return_cov=False,
warnings.warn("Predicted variances smaller than 0. "
"Setting those variances to 0.")
y_var[y_var_negative] = 0.0
# undo normalisation
y_var = y_var * self.y_train_std_**2
y_std = np.sqrt(y_var)

if return_mean_grad:
grad = self.kernel_.gradient_x(X[0], self.X_train_)
grad_mean = np.dot(grad.T, self.alpha_)

# undo normalisation
grad_mean = grad_mean * self.y_train_std_
if return_std_grad:
grad_std = np.zeros(X.shape[1])
if not np.allclose(y_std, grad_std):
grad_std = -np.dot(K_trans,
np.dot(K_inv, grad))[0] / y_std
# undo normalisation
grad_std = grad_std * self.y_train_std_**2
return y_mean, y_std, grad_mean, grad_std

if return_std:
Expand Down
13 changes: 13 additions & 0 deletions skopt/tests/test_acquisition.py
Original file line number Diff line number Diff line change
Expand Up @@ -119,6 +119,19 @@ def test_acquisition_gradient():
check_gradient_correctness(X_new, gpr, acq_func, np.max(y))


@pytest.mark.fast_test
def test_acquisition_gradient_cookbook():
    """Check analytic gradients of the cookbook acquisition functions.

    Fits a GP estimator to random data and verifies, for each of the
    LCB/PI/EI acquisition functions, that its gradient at a random query
    point matches a numerical estimate.
    """
    rng = np.random.RandomState(0)
    X, y = rng.randn(20, 5), rng.randn(20)
    X_new = rng.randn(5)

    gpr = cook_estimator("GP", Space(((-5.0, 5.0),)), random_state=0)
    gpr.fit(X, y)

    # Best observed value, hoisted out of the loop — identical for every
    # acquisition function under test.
    y_opt = np.max(y)
    for acquisition in ("LCB", "PI", "EI"):
        check_gradient_correctness(X_new, gpr, acquisition, y_opt)


@pytest.mark.fast_test
@pytest.mark.parametrize("acq_func", ["EIps", "PIps"])
def test_acquisition_per_second(acq_func):
Expand Down

0 comments on commit de32b5f

Please sign in to comment.