TST use global_random_seed in sklearn/gaussian_process/tests/test_gpc.py by OmarManzoor · Pull Request #24600 · scikit-learn/scikit-learn
Merged
18 changes: 10 additions & 8 deletions sklearn/gaussian_process/tests/test_gpc.py
@@ -118,11 +118,11 @@ def test_lml_gradient(kernel):
     assert_almost_equal(lml_gradient, lml_gradient_approx, 3)
 
 
-def test_random_starts():
+def test_random_starts(global_random_seed):
     # Test that an increasing number of random-starts of GP fitting only
     # increases the log marginal likelihood of the chosen theta.
     n_samples, n_features = 25, 2
-    rng = np.random.RandomState(0)
+    rng = np.random.RandomState(global_random_seed)
     X = rng.randn(n_samples, n_features) * 2 - 1
     y = (np.sin(X).sum(axis=1) + np.sin(3 * X).sum(axis=1)) > 0
 
@@ -132,19 +132,21 @@ def test_random_starts():
     last_lml = -np.inf
     for n_restarts_optimizer in range(5):
         gp = GaussianProcessClassifier(
-            kernel=kernel, n_restarts_optimizer=n_restarts_optimizer, random_state=0
+            kernel=kernel,
+            n_restarts_optimizer=n_restarts_optimizer,
+            random_state=global_random_seed,
         ).fit(X, y)
         lml = gp.log_marginal_likelihood(gp.kernel_.theta)
         assert lml > last_lml - np.finfo(np.float32).eps
         last_lml = lml
 
 
 @pytest.mark.parametrize("kernel", non_fixed_kernels)
-def test_custom_optimizer(kernel):
+def test_custom_optimizer(kernel, global_random_seed):
     # Test that GPC can use externally defined optimizers.
     # Define a dummy optimizer that simply tests 10 random hyperparameters
     def optimizer(obj_func, initial_theta, bounds):
-        rng = np.random.RandomState(0)
+        rng = np.random.RandomState(global_random_seed)
         theta_opt, func_min = initial_theta, obj_func(
             initial_theta, eval_gradient=False
         )
@@ -160,9 +162,9 @@ def optimizer(obj_func, initial_theta, bounds):
     gpc = GaussianProcessClassifier(kernel=kernel, optimizer=optimizer)
     gpc.fit(X, y_mc)
     # Checks that optimizer improved marginal likelihood
-    assert gpc.log_marginal_likelihood(gpc.kernel_.theta) > gpc.log_marginal_likelihood(
-        kernel.theta
-    )
+    assert gpc.log_marginal_likelihood(
+        gpc.kernel_.theta
+    ) >= gpc.log_marginal_likelihood(kernel.theta)
 
 
 @pytest.mark.parametrize("kernel", kernels)
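The tests in this diff request the global_random_seed pytest fixture, which scikit-learn defines in sklearn/conftest.py and drives through the SKLEARN_TESTS_GLOBAL_RANDOM_SEED environment variable. To illustrate the mechanism, here is a simplified sketch of such a conftest-level fixture; this is an assumption-laden toy, not scikit-learn's actual implementation, which supports additional values such as "any" and "all":

# conftest.py -- simplified sketch of a seed fixture in the spirit of
# scikit-learn's global_random_seed (NOT the real implementation).
import os

import pytest


def _parse_seeds(value):
    # "42" -> [42]; "40-42" -> [40, 41, 42]
    if "-" in value:
        start, stop = value.split("-")
        return list(range(int(start), int(stop) + 1))
    return [int(value)]


def pytest_generate_tests(metafunc):
    # Parametrize any test that declares a global_random_seed argument
    # over the seed(s) selected via the environment (default: 42).
    if "global_random_seed" in metafunc.fixturenames:
        seeds = _parse_seeds(
            os.environ.get("SKLEARN_TESTS_GLOBAL_RANDOM_SEED", "42")
        )
        metafunc.parametrize("global_random_seed", seeds)

With a conftest along these lines, a whole range of seeds can be exercised in one run, e.g. SKLEARN_TESTS_GLOBAL_RANDOM_SEED="40-42" pytest sklearn/gaussian_process/tests/test_gpc.py -k test_random_starts, which is the point of replacing the hard-coded RandomState(0) in these tests.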