Add min_impurity_decrease parameter to gradient boosting classes · sebp/scikit-survival@f6d0c74 · GitHub

Commit f6d0c74

Add min_impurity_decrease parameter to gradient boosting classes
Requires scikit-learn >= 0.19. See scikit-learn/scikit-learn#8449. Fixes #11.
1 parent: deeaeef

1 file changed: 3 additions, 1 deletion

sksurv/ensemble/boosting.py

@@ -467,7 +467,8 @@
                  criterion='friedman_mse',
                  min_samples_split=2,
                  min_samples_leaf=1, min_weight_fraction_leaf=0.,
-                 max_depth=3, min_impurity_split=1e-7, random_state=None,
+                 max_depth=3, min_impurity_split=None,
+                 min_impurity_decrease=0., random_state=None,
                  max_features=None, max_leaf_nodes=None,
                  subsample=1.0, dropout_rate=0.0,
                  verbose=0):
@@ -481,6 +482,7 @@ def __init__(self, loss="coxph", learning_rate=0.1, n_estimators=100,
                          min_weight_fraction_leaf=min_weight_fraction_leaf,
                          max_depth=max_depth,
                          min_impurity_split=min_impurity_split,
+                         min_impurity_decrease=min_impurity_decrease,
                          init=ZeroSurvivalEstimator(),
                          random_state=random_state,
                          max_features=max_features,
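
For context, a minimal usage sketch of the new keyword follows. It is an illustration, not part of the commit: it assumes the GradientBoostingSurvivalAnalysis class from sksurv.ensemble (whose __init__ is patched above), the bundled load_whas500 dataset from sksurv.datasets, and scikit-learn >= 0.19 as stated in the commit message; the parameter values are arbitrary.

# Illustrative sketch only: the class/dataset choices and values below are
# assumptions for demonstration, not taken from the commit itself.
from sksurv.datasets import load_whas500
from sksurv.ensemble import GradientBoostingSurvivalAnalysis

X, y = load_whas500()
X = X.select_dtypes("number")  # keep numeric columns to avoid extra encoding steps

model = GradientBoostingSurvivalAnalysis(
    loss="coxph",
    n_estimators=100,
    max_depth=3,
    # New in this commit: a node is split only if the split decreases the
    # impurity by at least this amount (forwarded to scikit-learn's trees).
    min_impurity_decrease=0.01,
)
model.fit(X, y)
print("training concordance index:", model.score(X, y))

This mirrors scikit-learn 0.19, where min_impurity_split was deprecated in favor of min_impurity_decrease (see scikit-learn/scikit-learn#8449), which is why the min_impurity_split default changes from 1e-7 to None in the diff above.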

0 commit comments