TST use global_random_seed in test_pls.py (#24526) · scikit-learn/scikit-learn@a473525

Commit a473525

yuanx749 and jeremiedbb authored
TST use global_random_seed in test_pls.py (#24526)
Co-authored-by: jeremiedbb <jeremiedbb@yahoo.fr>
1 parent cae0b77 commit a473525

1 file changed

sklearn/cross_decomposition/tests/test_pls.py

Lines changed: 18 additions & 12 deletions
@@ -484,31 +484,35 @@ def test_n_components_upper_bounds(Estimator):
 
 
 @pytest.mark.parametrize("n_samples, n_features", [(100, 10), (100, 200)])
-@pytest.mark.parametrize("seed", range(10))
-def test_singular_value_helpers(n_samples, n_features, seed):
+def test_singular_value_helpers(n_samples, n_features, global_random_seed):
     # Make sure SVD and power method give approximately the same results
-    X, Y = make_regression(n_samples, n_features, n_targets=5, random_state=seed)
+    X, Y = make_regression(
+        n_samples, n_features, n_targets=5, random_state=global_random_seed
+    )
     u1, v1, _ = _get_first_singular_vectors_power_method(X, Y, norm_y_weights=True)
     u2, v2 = _get_first_singular_vectors_svd(X, Y)
 
     _svd_flip_1d(u1, v1)
     _svd_flip_1d(u2, v2)
 
-    rtol = 1e-1
-    assert_allclose(u1, u2, rtol=rtol)
-    assert_allclose(v1, v2, rtol=rtol)
+    rtol = 1e-3
+    # Setting atol because some coordinates are very close to zero
+    assert_allclose(u1, u2, atol=u2.max() * rtol)
+    assert_allclose(v1, v2, atol=v2.max() * rtol)
 
 
-def test_one_component_equivalence():
+def test_one_component_equivalence(global_random_seed):
     # PLSSVD, PLSRegression and PLSCanonical should all be equivalent when
     # n_components is 1
-    X, Y = make_regression(100, 10, n_targets=5, random_state=0)
+    X, Y = make_regression(100, 10, n_targets=5, random_state=global_random_seed)
     svd = PLSSVD(n_components=1).fit(X, Y).transform(X)
     reg = PLSRegression(n_components=1).fit(X, Y).transform(X)
     canonical = PLSCanonical(n_components=1).fit(X, Y).transform(X)
 
-    assert_allclose(svd, reg, rtol=1e-2)
-    assert_allclose(svd, canonical, rtol=1e-2)
+    rtol = 1e-3
+    # Setting atol because some entries are very close to zero
+    assert_allclose(svd, reg, atol=reg.max() * rtol)
+    assert_allclose(svd, canonical, atol=canonical.max() * rtol)
 
 
 def test_svd_flip_1d():
@@ -526,9 +530,11 @@ def test_svd_flip_1d():
     assert_allclose(v, [-1, -2, -3])
 
 
-def test_loadings_converges():
+def test_loadings_converges(global_random_seed):
     """Test that CCA converges. Non-regression test for #19549."""
-    X, y = make_regression(n_samples=200, n_features=20, n_targets=20, random_state=20)
+    X, y = make_regression(
+        n_samples=200, n_features=20, n_targets=20, random_state=global_random_seed
+    )
 
     cca = CCA(n_components=10, max_iter=500)
 
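For context: global_random_seed is a pytest fixture provided by scikit-learn's test configuration. Instead of hard-coding @pytest.mark.parametrize("seed", range(10)), the fixture lets the test runner choose which seed(s) to exercise via the SKLEARN_TESTS_GLOBAL_RANDOM_SEED environment variable. The snippet below is a minimal, simplified sketch of how such a fixture can be wired up in a conftest.py; apart from the fixture name and the environment variable, the helper name, default seed, and single-integer parsing are assumptions for illustration, not scikit-learn's actual implementation (which also accepts values such as "all" or a range of seeds).

# conftest.py -- simplified illustration of a seed-parametrizing fixture.
# This is NOT scikit-learn's real implementation; it only handles a single
# integer seed with a fixed default.
import os

import pytest


def _selected_seeds():
    # Read the seed from the environment; fall back to a fixed default so
    # test runs stay deterministic when the variable is unset.
    value = os.environ.get("SKLEARN_TESTS_GLOBAL_RANDOM_SEED", "42")
    return [int(value)]


@pytest.fixture(params=_selected_seeds())
def global_random_seed(request):
    """Return the random seed selected for this test session."""
    return request.param

With such a fixture in place, a run like

    SKLEARN_TESTS_GLOBAL_RANDOM_SEED=7 pytest sklearn/cross_decomposition/tests/test_pls.py

executes the seeded tests above with seed 7, without any per-test seed list.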

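The switch from a relative to a scaled absolute tolerance in these assertions also deserves a note: as the added comments say, some coordinates of the compared vectors are very close to zero, and a purely relative tolerance makes the comparison of those coordinates essentially meaningless. The toy example below (hypothetical numbers, not taken from the test) illustrates why an absolute tolerance scaled to the largest coordinate is more robust.

import numpy as np
from numpy.testing import assert_allclose

# Two hypothetical solver outputs that agree to ~1e-4 in absolute terms,
# except that one coordinate is essentially zero in both.
u_power = np.array([0.9000, 0.1000, 1e-7])
u_svd = np.array([0.9001, 0.1001, 2e-7])

# A purely relative check would fail on the near-zero coordinate, because
# its relative error is large even though both values are negligible:
# assert_allclose(u_power, u_svd, rtol=1e-3)  # raises AssertionError

# Scaling an absolute tolerance to the largest coordinate, as the updated
# test does, compares every coordinate on the scale of the vector itself:
rtol = 1e-3
assert_allclose(u_power, u_svd, atol=u_svd.max() * rtol)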
0 commit comments
