COSMIT Mimimal refactoring of gradient_boosting.py (#11921) · jnothman/scikit-learn@d95cd91 · GitHub

Commit d95cd91

NicolasHug authored and jnothman committed
COSMIT Mimimal refactoring of gradient_boosting.py (scikit-learn#11921)
1 parent c225cf0 commit d95cd91

File tree

1 file changed: +6 additions, -14 deletions


sklearn/ensemble/gradient_boosting.py

Lines changed: 6 additions & 14 deletions
@@ -1190,22 +1190,14 @@ def _fit_stage(self, i, X, y, y_pred, sample_weight, sample_mask,
                 # no inplace multiplication!
                 sample_weight = sample_weight * sample_mask.astype(np.float64)
 
-            if X_csc is not None:
-                tree.fit(X_csc, residual, sample_weight=sample_weight,
-                         check_input=False, X_idx_sorted=X_idx_sorted)
-            else:
-                tree.fit(X, residual, sample_weight=sample_weight,
-                         check_input=False, X_idx_sorted=X_idx_sorted)
+            X = X_csr if X_csr is not None else X
+            tree.fit(X, residual, sample_weight=sample_weight,
+                     check_input=False, X_idx_sorted=X_idx_sorted)
 
             # update tree leaves
-            if X_csr is not None:
-                loss.update_terminal_regions(tree.tree_, X_csr, y, residual, y_pred,
-                                             sample_weight, sample_mask,
-                                             self.learning_rate, k=k)
-            else:
-                loss.update_terminal_regions(tree.tree_, X, y, residual, y_pred,
-                                             sample_weight, sample_mask,
-                                             self.learning_rate, k=k)
+            loss.update_terminal_regions(tree.tree_, X, y, residual, y_pred,
+                                         sample_weight, sample_mask,
+                                         self.learning_rate, k=k)
 
             # add tree to ensemble
             self.estimators_[i, k] = tree
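
The change collapses two duplicated call sites into one: the input matrix is selected with a single conditional expression, and fit / update_terminal_regions are then called once instead of once per branch. Below is a minimal, self-contained sketch of that pattern under simplified assumptions; the fit_tree helper and its arguments are illustrative only and not part of the scikit-learn API shown in the diff.

# Sketch of the "pick the representation once, call once" refactoring.
# fit_tree is a hypothetical helper, not scikit-learn internals.
import numpy as np
from scipy import sparse
from sklearn.tree import DecisionTreeRegressor


def fit_tree(tree, X, y, sample_weight=None, X_csr=None):
    # Use the precomputed CSR matrix when it exists, otherwise fall back to X,
    # so there is a single fit call instead of one per if/else branch.
    X = X_csr if X_csr is not None else X
    tree.fit(X, y, sample_weight=sample_weight)
    return tree


rng = np.random.RandomState(0)
X_dense = rng.rand(20, 3)
y = rng.rand(20)

# The helper behaves the same with or without a precomputed sparse copy.
fit_tree(DecisionTreeRegressor(max_depth=2, random_state=0), X_dense, y)
fit_tree(DecisionTreeRegressor(max_depth=2, random_state=0), X_dense, y,
         X_csr=sparse.csr_matrix(X_dense))

Keeping a single call site means any later change to the call (an added or removed keyword argument, for instance) only has to be edited in one place, which is the point of this cosmetic refactoring.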

0 commit comments
