Revert "FIX GBDT init parameter when it's a pipeline (#13472)" · xhluca/scikit-learn@5551301 · GitHub
[go: up one dir, main page]

Skip to content

Commit 5551301

Browse files
author
Xing
committed
Revert "FIX GBDT init parameter when it's a pipeline (scikit-learn#13472)"
This reverts commit 7ab82a3.
1 parent 32e8694 commit 5551301

File tree

2 files changed

+10
-45
lines changed

2 files changed

+10
-45
lines changed

sklearn/ensemble/gradient_boosting.py

Lines changed: 10 additions & 15 deletions
Original file line numberDiff line numberDiff line change
@@ -1478,25 +1478,20 @@ def fit(self, X, y, sample_weight=None, monitor=None):
14781478
raw_predictions = np.zeros(shape=(X.shape[0], self.loss_.K),
14791479
dtype=np.float64)
14801480
else:
1481-
# XXX clean this once we have a support_sample_weight tag
1482-
if sample_weight_is_none:
1483-
self.init_.fit(X, y)
1484-
else:
1485-
msg = ("The initial estimator {} does not support sample "
1486-
"weights.".format(self.init_.__class__.__name__))
1487-
try:
1488-
self.init_.fit(X, y, sample_weight=sample_weight)
1489-
except TypeError: # regular estimator without SW support
1490-
raise ValueError(msg)
1491-
except ValueError as e:
1492-
if 'not enough values to unpack' in str(e): # pipeline
1493-
raise ValueError(msg) from e
1494-
else: # regular estimator whose input checking failed
1495-
raise
1481+
try:
1482+
self.init_.fit(X, y, sample_weight=sample_weight)
1483+
except TypeError:
1484+
if sample_weight_is_none:
1485+
self.init_.fit(X, y)
1486+
else:
1487+
raise ValueError(
1488+
"The initial estimator {} does not support sample "
1489+
"weights.".format(self.init_.__class__.__name__))
14961490

14971491
raw_predictions = \
14981492
self.loss_.get_init_raw_predictions(X, self.init_)
14991493

1494+
15001495
begin_at_stage = 0
15011496

15021497
# The rng state must be preserved if warm_start is True

sklearn/ensemble/tests/test_gradient_boosting.py

Lines changed: 0 additions & 30 deletions
Original file line numberDiff line numberDiff line change
@@ -39,9 +39,6 @@
3939
from sklearn.exceptions import DataConversionWarning
4040
from sklearn.exceptions import NotFittedError
4141
from sklearn.dummy import DummyClassifier, DummyRegressor
42-
from sklearn.pipeline import make_pipeline
43-
from sklearn.linear_model import LinearRegression
44-
from sklearn.svm import NuSVR
4542

4643

4744
GRADIENT_BOOSTING_ESTIMATORS = [GradientBoostingClassifier,
@@ -1381,33 +1378,6 @@ def test_gradient_boosting_with_init(gb, dataset_maker, init_estimator):
13811378
gb(init=init_est).fit(X, y, sample_weight=sample_weight)
13821379

13831380

1384-
def test_gradient_boosting_with_init_pipeline():
1385-
# Check that the init estimator can be a pipeline (see issue #13466)
1386-
1387-
X, y = make_regression(random_state=0)
1388-
init = make_pipeline(LinearRegression())
1389-
gb = GradientBoostingRegressor(init=init)
1390-
gb.fit(X, y) # pipeline without sample_weight works fine
1391-
1392-
with pytest.raises(
1393-
ValueError,
1394-
match='The initial estimator Pipeline does not support sample '
1395-
'weights'):
1396-
gb.fit(X, y, sample_weight=np.ones(X.shape[0]))
1397-
1398-
# Passing sample_weight to a pipeline raises a ValueError. This test makes
1399-
# sure we make the distinction between ValueError raised by a pipeline that
1400-
# was passed sample_weight, and a ValueError raised by a regular estimator
1401-
# whose input checking failed.
1402-
with pytest.raises(
1403-
ValueError,
1404-
match='nu <= 0 or nu > 1'):
1405-
# Note that NuSVR properly supports sample_weight
1406-
init = NuSVR(gamma='auto', nu=1.5)
1407-
gb = GradientBoostingRegressor(init=init)
1408-
gb.fit(X, y, sample_weight=np.ones(X.shape[0]))
1409-
1410-
14111381
@pytest.mark.parametrize('estimator, missing_method', [
14121382
(GradientBoostingClassifier(init=LinearSVC()), 'predict_proba'),
14131383
(GradientBoostingRegressor(init=OneHotEncoder()), 'predict')

0 commit comments

Comments (0)