@@ -7,7 +7,7 @@
 import numpy as np
 import pytest
 from numpy.testing import assert_allclose
-from scipy import linalg, optimize, sparse
+from scipy import linalg, optimize
 
 from sklearn._loss.loss import (
     HalfBinomialLoss,
@@ -17,6 +17,7 @@
 from sklearn.datasets import make_low_rank_matrix
 from sklearn.linear_model._linear_loss import LinearModelLoss
 from sklearn.utils.extmath import squared_norm
+from sklearn.utils.fixes import CSR_CONTAINERS
 
 # We do not need to test all losses, just what LinearModelLoss does on top of the
 # base losses.
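For reference, `sklearn.utils.fixes.CSR_CONTAINERS` is a list of CSR constructors for the installed SciPy, which is why the direct `scipy.sparse` import can be dropped above. A minimal sketch of how such a list can be built (an assumption for illustration; the real fixture may differ in detail):

import scipy.sparse

# Assumed CSR_CONTAINERS-style list: always include the legacy csr_matrix,
# and add the sparse-array variant when SciPy provides it.
CSR_CONTAINERS = [scipy.sparse.csr_matrix]
if hasattr(scipy.sparse, "csr_array"):  # csr_array exists in SciPy >= 1.8
    CSR_CONTAINERS.append(scipy.sparse.csr_array)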
@@ -104,8 +105,9 @@ def test_init_zero_coef(base_loss, fit_intercept, n_features, dtype):
 @pytest.mark.parametrize("fit_intercept", [False, True])
 @pytest.mark.parametrize("sample_weight", [None, "range"])
 @pytest.mark.parametrize("l2_reg_strength", [0, 1])
+@pytest.mark.parametrize("csr_container", CSR_CONTAINERS)
 def test_loss_grad_hess_are_the_same(
-    base_loss, fit_intercept, sample_weight, l2_reg_strength
+    base_loss, fit_intercept, sample_weight, l2_reg_strength, csr_container
 ):
     """Test that loss and gradient are the same across different functions."""
     loss = LinearModelLoss(base_loss=base_loss(), fit_intercept=fit_intercept)
@@ -150,7 +152,7 @@ def test_loss_grad_hess_are_the_same(
     assert_allclose(h4 @ g4, h3(g3))
 
     # same for sparse X
-    X = sparse.csr_matrix(X)
+    X = csr_container(X)
     l1_sp = loss.loss(
         coef, X, y, sample_weight=sample_weight, l2_reg_strength=l2_reg_strength
     )
@@ -182,9 +184,9 @@ def test_loss_grad_hess_are_the_same(
 @pytest.mark.parametrize("base_loss", LOSSES)
 @pytest.mark.parametrize("sample_weight", [None, "range"])
 @pytest.mark.parametrize("l2_reg_strength", [0, 1])
-@pytest.mark.parametrize("X_sparse", [False, True])
+@pytest.mark.parametrize("X_container", CSR_CONTAINERS + [None])
 def test_loss_gradients_hessp_intercept(
-    base_loss, sample_weight, l2_reg_strength, X_sparse
+    base_loss, sample_weight, l2_reg_strength, X_container
 ):
     """Test that loss and gradient handle intercept correctly."""
     loss = LinearModelLoss(base_loss=base_loss(), fit_intercept=False)
@@ -199,8 +201,8 @@ def test_loss_gradients_hessp_intercept(
         :, :-1
     ]  # exclude intercept column as it is added automatically by loss_inter
 
-    if X_sparse:
-        X = sparse.csr_matrix(X)
+    if X_container is not None:
+        X = X_container(X)
 
     if sample_weight == "range":
         sample_weight = np.linspace(1, y.shape[0], num=y.shape[0])
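Taken together, the new parametrization runs each dense/sparse comparison once per CSR container instead of toggling a single boolean. A self-contained sketch of the pattern (test name and data are illustrative, not from this diff):

import numpy as np
import pytest
from numpy.testing import assert_allclose
from scipy import sparse

# As sketched above; extend with sparse.csr_array where SciPy provides it.
CSR_CONTAINERS = [sparse.csr_matrix]

@pytest.mark.parametrize("csr_container", CSR_CONTAINERS)
def test_dense_sparse_round_trip(csr_container):
    # Mirrors `X = csr_container(X)` in the diff: the same dense data,
    # rebuilt in each CSR container, must represent identical values.
    X = np.arange(6.0).reshape(3, 2)
    assert_allclose(csr_container(X).toarray(), X)

Parametrizing over containers, with `None` standing in for the dense case in `X_container`, also lets the same tests cover the newer sparse-array API alongside the legacy sparse-matrix API.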