6
6
from numpy .testing import assert_almost_equal
7
7
from numpy .testing import assert_allclose
8 8
from numpy .testing import assert_equal
9
+ import pytest
9
10
10
11
from sklearn .utils import check_random_state
11
12
from sklearn .utils .stats import _weighted_percentile
18
19
from sklearn .ensemble ._gb_losses import MultinomialDeviance
19
20
from sklearn .ensemble ._gb_losses import ExponentialLoss
20
21
from sklearn .ensemble ._gb_losses import LOSS_FUNCTIONS
22
+ from sklearn .linear_model import LinearRegression
23
+ from sklearn .linear_model import LogisticRegression
21
24
22
25
23
26
def test_binomial_deviance ():
@@ -257,27 +260,13 @@ def test_init_raw_predictions_values():
257
260
p = y .mean ()
258
261
assert_almost_equal (raw_predictions , np .log (p / (1 - p )))
259
262
260
- # FIXME: uncomment this and fix
261
- # for y_unstable in (np.zeros(shape=n_samples), np.ones(shape=n_samples)):
262
- # init_estimator = loss.init_estimator().fit(X, y_unstable)
263
- # raw_predictions = loss.get_init_raw_predictions(y_unstable,
264
- # init_estimator)
265
- # assert_all_finite(raw_predictions)
266
-
267
263
# Exponential loss
268
264
loss = ExponentialLoss (n_classes = 2 )
269
265
init_estimator = loss .init_estimator ().fit (X , y )
270
266
raw_predictions = loss .get_init_raw_predictions (y , init_estimator )
271
267
p = y .mean ()
272
268
assert_almost_equal (raw_predictions , .5 * np .log (p / (1 - p )))
273
269
274
- # FIXME: uncomment this and fix
275
- # for y_unstable in (np.zeros(shape=n_samples), np.ones(shape=n_samples)):
276
- # init_estimator = loss.init_estimator().fit(X, y_unstable)
277
- # raw_predictions = loss.get_init_raw_predictions(y_unstable,
278
- # init_estimator)
279
- # assert_all_finite(raw_predictions)
280
-
281
270
# Multinomial deviance loss
282
271
for n_classes in range (3 , 5 ):
283
272
y = rng .randint (0 , n_classes , size = n_samples )
@@ -288,9 +277,49 @@ def test_init_raw_predictions_values():
288
277
p = (y == k ).mean ()
289
278
assert_almost_equal (raw_predictions [:, k ], np .log (p ))
290
279
291
- # FIXME: uncomment this and fix
292
- # for y_unstable in (np.zeros(shape=n_samples), np.ones(shape=n_samples)):
293
- # init_estimator = loss.init_estimator().fit(X, y_unstable)
294
- # raw_predictions = loss.get_init_raw_predictions(y_unstable,
295
- # init_estimator)
296
- # assert_all_finite(raw_predictions)
280
+
281
def test_bad_init_estimator():
    """Check the shape validation of the init estimator's predictions.

    ``get_init_raw_predictions`` must raise an informative ``ValueError``
    when the init estimator's ``predict()`` (regression losses) or
    ``predict_proba()`` (classification losses) output does not have the
    shape the loss expects.
    """
    rng = np.random.RandomState(0)
    n_samples = 100
    X = rng.normal(size=(n_samples, 10))

    # --- Regression losses ---------------------------------------------
    # Fit the init estimator on a 2-output regression target so that its
    # predict() returns shape (n_samples, 2) instead of the expected
    # single-output shape.
    multioutput_target = rng.normal(size=(n_samples, 2))
    bad_init = LinearRegression().fit(X, multioutput_target)
    regression_losses = (
        LeastSquaresError(n_classes=1),
        LeastAbsoluteError(n_classes=1),
        QuantileLossFunction(n_classes=1),
        HuberLossFunction(n_classes=1),
    )
    for loss in regression_losses:
        err_msg = 'The init estimator predicted output with shape'
        with pytest.raises(ValueError, match=err_msg):
            loss.get_init_raw_predictions(X, estimator=bad_init)

    # --- Binomial deviance and exponential loss ------------------------
    # Fit the init estimator on 3 classes instead of 2 so predict_proba()
    # has one column too many.
    three_class_target = rng.randint(0, 3, size=(n_samples))
    bad_init = LogisticRegression().fit(X, three_class_target)
    binary_losses = (BinomialDeviance(n_classes=2),
                     ExponentialLoss(n_classes=2))
    for loss in binary_losses:
        err_msg = 'The init estimator predicted probabilities with shape'
        with pytest.raises(ValueError, match=err_msg):
            loss.get_init_raw_predictions(X, estimator=bad_init)

    # --- Multinomial deviance ------------------------------------------
    # Fit the init estimator on 4 classes instead of 3.
    four_class_target = rng.randint(0, 4, size=(n_samples))
    bad_init = LogisticRegression().fit(X, four_class_target)
    loss = MultinomialDeviance(n_classes=3)
    err_msg = 'The init estimator predicted probabilities with shape'
    with pytest.raises(ValueError, match=err_msg):
        loss.get_init_raw_predictions(X, estimator=bad_init)
0 commit comments