@@ -9,6 +9,5 @@
 import numpy as np
 import pytest
-import scipy.sparse as sp
 from scipy.special import logsumexp

 from sklearn._loss.loss import HalfMultinomialLoss
@@ -27,6 +26,7 @@
     assert_array_almost_equal,
 )
 from sklearn.utils.extmath import row_norms
+from sklearn.utils.fixes import CSR_CONTAINERS

 iris = load_iris()

@@ -356,7 +356,8 @@ def test_regressor_matching():


 @pytest.mark.filterwarnings("ignore:The max_iter was reached")
-def test_sag_pobj_matches_logistic_regression():
+@pytest.mark.parametrize("csr_container", CSR_CONTAINERS)
+def test_sag_pobj_matches_logistic_regression(csr_container):
     """tests if the sag pobj matches log reg"""
     n_samples = 100
     alpha = 1.0
@@ -383,7 +384,7 @@ def test_sag_pobj_matches_logistic_regression():
     )

     clf1.fit(X, y)
-    clf2.fit(sp.csr_matrix(X), y)
+    clf2.fit(csr_container(X), y)
     clf3.fit(X, y)

     pobj1 = get_pobj(clf1.coef_, alpha, X, y, log_loss)
@@ -396,7 +397,8 @@ def test_sag_pobj_matches_logistic_regression():


 @pytest.mark.filterwarnings("ignore:The max_iter was reached")
-def test_sag_pobj_matches_ridge_regression():
+@pytest.mark.parametrize("csr_container", CSR_CONTAINERS)
+def test_sag_pobj_matches_ridge_regression(csr_container):
     """tests if the sag pobj matches ridge reg"""
     n_samples = 100
     n_features = 10
@@ -427,7 +429,7 @@ def test_sag_pobj_matches_ridge_regression():
     )

     clf1.fit(X, y)
-    clf2.fit(sp.csr_matrix(X), y)
+    clf2.fit(csr_container(X), y)
     clf3.fit(X, y)

     pobj1 = get_pobj(clf1.coef_, alpha, X, y, squared_loss)
@@ -440,7 +442,8 @@ def test_sag_pobj_matches_ridge_regression():


 @pytest.mark.filterwarnings("ignore:The max_iter was reached")
-def test_sag_regressor_computed_correctly():
+@pytest.mark.parametrize("csr_container", CSR_CONTAINERS)
+def test_sag_regressor_computed_correctly(csr_container):
     """tests if the sag regressor is computed correctly"""
     alpha = 0.1
     n_features = 10
@@ -465,7 +468,7 @@ def test_sag_regressor_computed_correctly():
     clf2 = clone(clf1)

     clf1.fit(X, y)
-    clf2.fit(sp.csr_matrix(X), y)
+    clf2.fit(csr_container(X), y)

     spweights1, spintercept1 = sag_sparse(
         X,
@@ -551,7 +554,8 @@ def test_get_auto_step_size():


 @pytest.mark.parametrize("seed", range(3))  # locally tested with 1000 seeds
-def test_sag_regressor(seed):
+@pytest.mark.parametrize("csr_container", CSR_CONTAINERS)
+def test_sag_regressor(seed, csr_container):
     """tests if the sag regressor performs well"""
     xmin, xmax = -5, 5
     n_samples = 300
@@ -573,7 +577,7 @@ def test_sag_regressor(seed):
     )
     clf2 = clone(clf1)
     clf1.fit(X, y)
-    clf2.fit(sp.csr_matrix(X), y)
+    clf2.fit(csr_container(X), y)
     score1 = clf1.score(X, y)
     score2 = clf2.score(X, y)
     assert score1 > 0.98
@@ -585,15 +589,16 @@ def test_sag_regressor(seed):
     clf1 = Ridge(tol=tol, solver="sag", max_iter=max_iter, alpha=alpha * n_samples)
     clf2 = clone(clf1)
     clf1.fit(X, y)
-    clf2.fit(sp.csr_matrix(X), y)
+    clf2.fit(csr_container(X), y)
     score1 = clf1.score(X, y)
     score2 = clf2.score(X, y)
     assert score1 > 0.45
     assert score2 > 0.45


 @pytest.mark.filterwarnings("ignore:The max_iter was reached")
-def test_sag_classifier_computed_correctly():
+@pytest.mark.parametrize("csr_container", CSR_CONTAINERS)
+def test_sag_classifier_computed_correctly(csr_container):
     """tests if the binary classifier is computed correctly"""
     alpha = 0.1
     n_samples = 50
@@ -619,7 +624,7 @@ def test_sag_classifier_computed_correctly():
     clf2 = clone(clf1)

     clf1.fit(X, y)
-    clf2.fit(sp.csr_matrix(X), y)
+    clf2.fit(csr_container(X), y)

     spweights, spintercept = sag_sparse(
         X,
@@ -649,7 +654,8 @@ def test_sag_classifier_computed_correctly():


 @pytest.mark.filterwarnings("ignore:The max_iter was reached")
-def test_sag_multiclass_computed_correctly():
+@pytest.mark.parametrize("csr_container", CSR_CONTAINERS)
+def test_sag_multiclass_computed_correctly(csr_container):
     """tests if the multiclass classifier is computed correctly"""
     alpha = 0.1
     n_samples = 20
@@ -672,7 +678,7 @@ def test_sag_multiclass_computed_correctly():
     clf2 = clone(clf1)

     clf1.fit(X, y)
-    clf2.fit(sp.csr_matrix(X), y)
+    clf2.fit(csr_container(X), y)

     coef1 = []
     intercept1 = []
@@ -720,7 +726,8 @@ def test_sag_multiclass_computed_correctly():
         assert_almost_equal(clf2.intercept_[i], intercept2[i], decimal=1)


-def test_classifier_results():
+@pytest.mark.parametrize("csr_container", CSR_CONTAINERS)
+def test_classifier_results(csr_container):
     """tests if classifier results match target"""
     alpha = 0.1
     n_features = 20
@@ -742,15 +749,16 @@ def test_classifier_results():
     clf2 = clone(clf1)

     clf1.fit(X, y)
-    clf2.fit(sp.csr_matrix(X), y)
+    clf2.fit(csr_container(X), y)
     pred1 = clf1.predict(X)
     pred2 = clf2.predict(X)
     assert_almost_equal(pred1, y, decimal=12)
     assert_almost_equal(pred2, y, decimal=12)


 @pytest.mark.filterwarnings("ignore:The max_iter was reached")
-def test_binary_classifier_class_weight():
+@pytest.mark.parametrize("csr_container", CSR_CONTAINERS)
+def test_binary_classifier_class_weight(csr_container):
     """tests binary classifier with classweights for each class"""
     alpha = 0.1
     n_samples = 50
@@ -778,7 +786,7 @@ def test_binary_classifier_class_weight():
     clf2 = clone(clf1)

     clf1.fit(X, y)
-    clf2.fit(sp.csr_matrix(X), y)
+    clf2.fit(csr_container(X), y)

     le = LabelEncoder()
     class_weight_ = compute_class_weight(class_weight, classes=np.unique(y), y=y)
@@ -813,7 +821,8 @@ def test_binary_classifier_class_weight():


 @pytest.mark.filterwarnings("ignore:The max_iter was reached")
-def test_multiclass_classifier_class_weight():
+@pytest.mark.parametrize("csr_container", CSR_CONTAINERS)
+def test_multiclass_classifier_class_weight(csr_container):
     """tests multiclass with classweights for each class"""
     alpha = 0.1
     n_samples = 20
@@ -837,7 +846,7 @@ def test_multiclass_classifier_class_weight():
     )
     clf2 = clone(clf1)
     clf1.fit(X, y)
-    clf2.fit(sp.csr_matrix(X), y)
+    clf2.fit(csr_container(X), y)

     le = LabelEncoder()
     class_weight_ = compute_class_weight(class_weight, classes=np.unique(y), y=y)
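
For context on the change above: `CSR_CONTAINERS` (imported from `sklearn.utils.fixes`) is a list of CSR constructors, typically `scipy.sparse.csr_matrix` plus `scipy.sparse.csr_array` when the installed SciPy provides it, so each parametrized test runs once per container instead of hard-coding `sp.csr_matrix`. The sketch below shows the pattern these hunks apply, assuming a recent scikit-learn where that import exists; the test name and body are illustrative and not taken from the file.

import numpy as np
import pytest

from sklearn.linear_model import LogisticRegression
from sklearn.utils.fixes import CSR_CONTAINERS  # e.g. csr_matrix, and csr_array on newer SciPy


@pytest.mark.parametrize("csr_container", CSR_CONTAINERS)
def test_sparse_dense_agree(csr_container):  # illustrative name, not from the diff
    # Small, clearly separable problem so both fits converge easily.
    rng = np.random.RandomState(0)
    X = rng.normal(size=(60, 4))
    y = (X[:, 0] > 0).astype(int)

    clf_dense = LogisticRegression(solver="sag", max_iter=500).fit(X, y)
    # Same estimator fitted on the CSR container under test.
    clf_sparse = LogisticRegression(solver="sag", max_iter=500).fit(csr_container(X), y)

    # Dense and sparse inputs should both recover the separating feature.
    assert clf_dense.score(X, y) > 0.9
    assert clf_sparse.score(X, y) > 0.9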