[MRG] Set min_impurity_split in gradient boosting models · scikit-learn/scikit-learn@50e30b2 · GitHub

Commit 50e30b2

Author: Sebastian Pölsterl (committed)
[MRG] Set min_impurity_split in gradient boosting models
self.min_impurity_split should be passed to DecisionTreeRegressor in BaseGradientBoosting._fit_stage. Fixes #8006
1 parent 40a1b7a commit 50e30b2
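
A minimal usage sketch of the behaviour this commit restores (the data and threshold below are illustrative, not taken from the commit, and assume a scikit-learn build that includes this fix and still exposes min_impurity_split, which was later deprecated in favour of min_impurity_decrease): a min_impurity_split value set on the ensemble is now forwarded to every per-stage DecisionTreeRegressor.

from sklearn import datasets
from sklearn.ensemble import GradientBoostingRegressor

# Any small learning task works; make_hastie_10_2 mirrors the new test below.
X, y = datasets.make_hastie_10_2(n_samples=100, random_state=1)
est = GradientBoostingRegressor(min_impurity_split=0.1).fit(X, y)

# With the fix, each fitted per-stage tree carries the forwarded threshold
# instead of silently keeping its own default.
assert all(tree.min_impurity_split == 0.1 for tree in est.estimators_.flat)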

File tree

2 files changed: +13 −0 lines changed


sklearn/ensemble/gradient_boosting.py

Lines changed: 1 addition & 0 deletions
@@ -767,6 +767,7 @@ def _fit_stage(self, i, X, y, y_pred, sample_weight, sample_mask,
             min_samples_split=self.min_samples_split,
             min_samples_leaf=self.min_samples_leaf,
             min_weight_fraction_leaf=self.min_weight_fraction_leaf,
+            min_impurity_split=self.min_impurity_split,
             max_features=self.max_features,
             max_leaf_nodes=self.max_leaf_nodes,
             random_state=random_state,
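
As a rough illustration of what the forwarded parameter does inside each stage (a sketch, assuming a scikit-learn release that still accepts min_impurity_split): the threshold pre-prunes the regression tree, so a node is split only while its impurity stays above the threshold.

from sklearn.datasets import make_regression
from sklearn.tree import DecisionTreeRegressor

X, y = make_regression(n_samples=200, n_features=5, noise=1.0, random_state=0)

# Unrestricted tree versus one pre-pruned at half the total target variance.
unpruned = DecisionTreeRegressor(random_state=0).fit(X, y)
pruned = DecisionTreeRegressor(min_impurity_split=0.5 * y.var(),
                               random_state=0).fit(X, y)

# Splitting stops once node impurity (here, the variance of the targets in
# the node) drops below the threshold, so the pre-pruned tree is never larger.
assert pruned.tree_.node_count <= unpruned.tree_.node_count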

sklearn/ensemble/tests/test_gradient_boosting.py

Lines changed: 12 additions & 0 deletions
@@ -961,6 +961,18 @@ def test_max_leaf_nodes_max_depth():
     assert_equal(tree.max_depth, 1)
 
 
+def test_min_impurity_split():
+    # Test that min_impurity_split is passed on to the base estimators.
+    X, y = datasets.make_hastie_10_2(n_samples=100, random_state=1)
+    all_estimators = [GradientBoostingRegressor,
+                      GradientBoostingClassifier]
+
+    for GBEstimator in all_estimators:
+        est = GBEstimator(min_impurity_split=0.1).fit(X, y)
+        for tree in est.estimators_.flat:
+            assert_equal(tree.min_impurity_split, 0.1)
+
+
 def test_warm_start_wo_nestimators_change():
     # Test if warm_start does nothing if n_estimators is not changed.
     # Regression test for #3513.
