@@ -8,7 +8,6 @@
import numpy as np
import pytest
-import scipy.sparse as sp
from scipy.special import logsumexp
from sklearn._loss.loss import HalfMultinomialLoss
@@ -27,6 +26,7 @@
assert_array_almost_equal,
)
from sklearn.utils.extmath import row_norms
+from sklearn.utils.fixes import CSR_CONTAINERS
iris = load_iris()
@@ -356,7 +356,8 @@ def test_regressor_matching():
@pytest.mark.filterwarnings("ignore:The max_iter was reached")
-def test_sag_pobj_matches_logistic_regression():
+@pytest.mark.parametrize("csr_container", CSR_CONTAINERS)
+def test_sag_pobj_matches_logistic_regression(csr_container):
"""tests if the sag pobj matches log reg"""
n_samples = 100
alpha = 1.0
@@ -383,7 +384,7 @@ def test_sag_pobj_matches_logistic_regression():
)
clf1.fit(X, y)
-clf2.fit(sp.csr_matrix(X), y)
+clf2.fit(csr_container(X), y)
clf3.fit(X, y)
pobj1 = get_pobj(clf1.coef_, alpha, X, y, log_loss)
@@ -396,7 +397,8 @@ def test_sag_pobj_matches_logistic_regression():
@pytest.mark.filterwarnings("ignore:The max_iter was reached")
-def test_sag_pobj_matches_ridge_regression():
+@pytest.mark.parametrize("csr_container", CSR_CONTAINERS)
+def test_sag_pobj_matches_ridge_regression(csr_container):
"""tests if the sag pobj matches ridge reg"""
n_samples = 100
n_features = 10
@@ -427,7 +429,7 @@ def test_sag_pobj_matches_ridge_regression():
)
clf1.fit(X, y)
-clf2.fit(sp.csr_matrix(X), y)
+clf2.fit(csr_container(X), y)
clf3.fit(X, y)
pobj1 = get_pobj(clf1.coef_, alpha, X, y, squared_loss)
@@ -440,7 +442,8 @@ def test_sag_pobj_matches_ridge_regression():
@pytest.mark.filterwarnings("ignore:The max_iter was reached")
-def test_sag_regressor_computed_correctly():
+@pytest.mark.parametrize("csr_container", CSR_CONTAINERS)
+def test_sag_regressor_computed_correctly(csr_container):
"""tests if the sag regressor is computed correctly"""
alpha = 0.1
n_features = 10
@@ -465,7 +468,7 @@ def test_sag_regressor_computed_correctly():
clf2 = clone(clf1)
clf1.fit(X, y)
-clf2.fit(sp.csr_matrix(X), y)
+clf2.fit(csr_container(X), y)
spweights1, spintercept1 = sag_sparse(
X,
@@ -551,7 +554,8 @@ def test_get_auto_step_size():
@pytest.mark.parametrize("seed", range(3)) # locally tested with 1000 seeds
-def test_sag_regressor(seed):
+@pytest.mark.parametrize("csr_container", CSR_CONTAINERS)
+def test_sag_regressor(seed, csr_container):
"""tests if the sag regressor performs well"""
xmin, xmax = -5, 5
n_samples = 300
@@ -573,7 +577,7 @@ def test_sag_regressor(seed):
)
clf2 = clone(clf1)
clf1.fit(X, y)
-clf2.fit(sp.csr_matrix(X), y)
+clf2.fit(csr_container(X), y)
score1 = clf1.score(X, y)
score2 = clf2.score(X, y)
assert score1 > 0.98
@@ -585,15 +589,16 @@ def test_sag_regressor(seed):
clf1 = Ridge(tol=tol, solver="sag", max_iter=max_iter, alpha=alpha * n_samples)
clf2 = clone(clf1)
clf1.fit(X, y)
-clf2.fit(sp.csr_matrix(X), y)
+clf2.fit(csr_container(X), y)
score1 = clf1.score(X, y)
score2 = clf2.score(X, y)
assert score1 > 0.45
assert score2 > 0.45
@pytest.mark.filterwarnings("ignore:The max_iter was reached")
-def test_sag_classifier_computed_correctly():
+@pytest.mark.parametrize("csr_container", CSR_CONTAINERS)
+def test_sag_classifier_computed_correctly(csr_container):
"""tests if the binary classifier is computed correctly"""
alpha = 0.1
n_samples = 50
@@ -619,7 +624,7 @@ def test_sag_classifier_computed_correctly():
clf2 = clone(clf1)
clf1.fit(X, y)
-clf2.fit(sp.csr_matrix(X), y)
+clf2.fit(csr_container(X), y)
spweights, spintercept = sag_sparse(
X,
@@ -649,7 +654,8 @@ def test_sag_classifier_computed_correctly():
@pytest.mark.filterwarnings("ignore:The max_iter was reached")
-def test_sag_multiclass_computed_correctly():
+@pytest.mark.parametrize("csr_container", CSR_CONTAINERS)
+def test_sag_multiclass_computed_correctly(csr_container):
"""tests if the multiclass classifier is computed correctly"""
alpha = 0.1
n_samples = 20
@@ -672,7 +678,7 @@ def test_sag_multiclass_computed_correctly():
clf2 = clone(clf1)
clf1.fit(X, y)
-clf2.fit(sp.csr_matrix(X), y)
+clf2.fit(csr_container(X), y)
coef1 = []
intercept1 = []
@@ -720,7 +726,8 @@ def test_sag_multiclass_computed_correctly():
assert_almost_equal(clf2.intercept_[i], intercept2[i], decimal=1)
-def test_classifier_results():
+@pytest.mark.parametrize("csr_container", CSR_CONTAINERS)
+def test_classifier_results(csr_container):
"""tests if classifier results match target"""
alpha = 0.1
n_features = 20
@@ -742,15 +749,16 @@ def test_classifier_results():
clf2 = clone(clf1)
clf1.fit(X, y)
-clf2.fit(sp.csr_matrix(X), y)
+clf2.fit(csr_container(X), y)
pred1 = clf1.predict(X)
pred2 = clf2.predict(X)
assert_almost_equal(pred1, y, decimal=12)
assert_almost_equal(pred2, y, decimal=12)
@pytest.mark.filterwarnings("ignore:The max_iter was reached")
-def test_binary_classifier_class_weight():
+@pytest.mark.parametrize("csr_container", CSR_CONTAINERS)
+def test_binary_classifier_class_weight(csr_container):
"""tests binary classifier with classweights for each class"""
alpha = 0.1
n_samples = 50
@@ -778,7 +786,7 @@ def test_binary_classifier_class_weight():
clf2 = clone(clf1)
clf1.fit(X, y)
-clf2.fit(sp.csr_matrix(X), y)
+clf2.fit(csr_container(X), y)
le = LabelEncoder()
class_weight_ = compute_class_weight(class_weight, classes=np.unique(y), y=y)
@@ -813,7 +821,8 @@ def test_binary_classifier_class_weight():
@pytest.mark.filterwarnings("ignore:The max_iter was reached")
-def test_multiclass_classifier_class_weight():
+@pytest.mark.parametrize("csr_container", CSR_CONTAINERS)
+def test_multiclass_classifier_class_weight(csr_container):
"""tests multiclass with classweights for each class"""
alpha = 0.1
n_samples = 20
@@ -837,7 +846,7 @@ def test_multiclass_classifier_class_weight():
)
clf2 = clone(clf1)
clf1.fit(X, y)
clf2.fit(sp.csr_matrix (X), y)
clf2.fit(csr_container (X), y)
le = LabelEncoder()
class_weight_ = compute_class_weight(class_weight, classes=np.unique(y), y=y)
TST Extend tests for scipy.sparse.*array in sklearn/linear_model/tests/test_sag.py (#27206)
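The change applied throughout is mechanical: every test that previously wrapped `X` in `sp.csr_matrix` is parametrized over `CSR_CONTAINERS` from `sklearn.utils.fixes`, so the same test body runs once per CSR container type (`csr_matrix`, plus `csr_array` on SciPy versions that provide it). A minimal sketch of the pattern, assuming an illustrative test name, estimator, and toy data that are not part of the PR:

```python
import numpy as np
import pytest

from sklearn.linear_model import LogisticRegression
# CSR_CONTAINERS lists the available CSR container types
# (csr_matrix, and csr_array when SciPy provides it).
from sklearn.utils.fixes import CSR_CONTAINERS


@pytest.mark.parametrize("csr_container", CSR_CONTAINERS)
def test_dense_and_sparse_inputs_agree(csr_container):
    # Illustrative toy data; the real tests in test_sag.py build their own.
    rng = np.random.RandomState(0)
    X = rng.normal(size=(50, 3))
    y = (X[:, 0] > 0).astype(int)

    clf_dense = LogisticRegression(solver="sag", max_iter=500)
    clf_sparse = LogisticRegression(solver="sag", max_iter=500)
    clf_dense.fit(X, y)
    # Wrapping X in the parametrized container exercises each CSR flavor.
    clf_sparse.fit(csr_container(X), y)

    assert clf_dense.score(X, y) > 0.9
    assert clf_sparse.score(X, y) > 0.9
```

With this parametrization, pytest collects one variant of each test per container type, so the SAG code paths are exercised with both the sparse matrix and the sparse array interface.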