@@ -622,7 +622,7 @@ def _fit_stages(
                 X_csr,
             )

-            # track deviance (= loss)
+            # track loss
             if do_oob:
                 self.train_score_[i] = loss_(
                     y[sample_mask],
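For context, the renamed comment guards the per-stage bookkeeping: each iteration records the in-bag loss in `train_score_` and, when `subsample < 1.0`, the change in out-of-bag loss. Below is a minimal sketch of that bookkeeping, not the library's internal code: squared error stands in for the configured loss, and the helper name `track_stage_losses` is invented for illustration.

```python
import numpy as np
from sklearn.metrics import mean_squared_error


def track_stage_losses(y, staged_preds, sample_mask):
    """Record in-bag loss and OOB improvement per boosting stage (sketch).

    `staged_preds` is an iterable of per-stage predictions for all rows;
    `sample_mask` is a boolean array marking the in-bag rows of each stage.
    """
    train_score = []
    oob_improvement = []
    prev_oob = None
    for raw in staged_preds:
        # in-bag loss: rows used to fit this stage
        train_score.append(mean_squared_error(y[sample_mask], raw[sample_mask]))
        # out-of-bag loss: held-out rows, used only for monitoring
        oob = mean_squared_error(y[~sample_mask], raw[~sample_mask])
        # first entry is a placeholder here; in the estimator it is the
        # improvement of stage 0 over the ``init`` estimator
        oob_improvement.append(0.0 if prev_oob is None else prev_oob - oob)
        prev_oob = oob
    return np.asarray(train_score), np.asarray(oob_improvement)
```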
@@ -1056,28 +1056,28 @@ class GradientBoostingClassifier(ClassifierMixin, BaseGradientBoosting):
         :func:`sklearn.inspection.permutation_importance` as an alternative.

     oob_improvement_ : ndarray of shape (n_estimators,)
-        The improvement in loss (= deviance) on the out-of-bag samples
+        The improvement in loss on the out-of-bag samples
         relative to the previous iteration.
         ``oob_improvement_[0]`` is the improvement in
         loss of the first stage over the ``init`` estimator.
         Only available if ``subsample < 1.0``.

     oob_scores_ : ndarray of shape (n_estimators,)
-        The full history of the loss (= deviance) values on the out-of-bag
+        The full history of the loss values on the out-of-bag
         samples. Only available if `subsample < 1.0`.

         .. versionadded:: 1.3

     oob_score_ : float
-        The last value of the loss (= deviance) on the out-of-bag samples. It is
+        The last value of the loss on the out-of-bag samples. It is
         the same as `oob_scores_[-1]`. Only available if `subsample < 1.0`.

         .. versionadded:: 1.3

     train_score_ : ndarray of shape (n_estimators,)
-        The i-th score ``train_score_[i]`` is the deviance (= loss) of the
+        The i-th score ``train_score_[i]`` is the loss of the
         model at iteration ``i`` on the in-bag sample.
-        If ``subsample == 1`` this is the deviance on the training data.
+        If ``subsample == 1`` this is the loss on the training data.

     init_ : estimator
         The estimator that provides the initial predictions.
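As a usage note for the attributes documented above: the out-of-bag arrays only exist when ``subsample < 1.0``, and ``oob_scores_`` / ``oob_score_`` additionally require scikit-learn >= 1.3 (per the ``.. versionadded:: 1.3`` markers). A quick check on synthetic data:

```python
from sklearn.datasets import make_classification
from sklearn.ensemble import GradientBoostingClassifier

X, y = make_classification(n_samples=500, random_state=0)

# subsample < 1.0 turns on the out-of-bag attributes described above
clf = GradientBoostingClassifier(n_estimators=100, subsample=0.8, random_state=0)
clf.fit(X, y)

print(clf.train_score_.shape)      # (100,): in-bag loss at each stage
print(clf.oob_improvement_.shape)  # (100,): per-stage improvement in OOB loss
print(clf.oob_scores_[-1] == clf.oob_score_)  # True: last OOB loss value (>= 1.3)
```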
@@ -1619,28 +1619,28 @@ class GradientBoostingRegressor(RegressorMixin, BaseGradientBoosting):
         :func:`sklearn.inspection.permutation_importance` as an alternative.

     oob_improvement_ : ndarray of shape (n_estimators,)
-        The improvement in loss (= deviance) on the out-of-bag samples
+        The improvement in loss on the out-of-bag samples
         relative to the previous iteration.
         ``oob_improvement_[0]`` is the improvement in
         loss of the first stage over the ``init`` estimator.
         Only available if ``subsample < 1.0``.

     oob_scores_ : ndarray of shape (n_estimators,)
-        The full history of the loss (= deviance) values on the out-of-bag
+        The full history of the loss values on the out-of-bag
         samples. Only available if `subsample < 1.0`.

         .. versionadded:: 1.3

     oob_score_ : float
-        The last value of the loss (= deviance) on the out-of-bag samples. It is
+        The last value of the loss on the out-of-bag samples. It is
         the same as `oob_scores_[-1]`. Only available if `subsample < 1.0`.

         .. versionadded:: 1.3

     train_score_ : ndarray of shape (n_estimators,)
-        The i-th score ``train_score_[i]`` is the deviance (= loss) of the
+        The i-th score ``train_score_[i]`` is the loss of the
         model at iteration ``i`` on the in-bag sample.
-        If ``subsample == 1`` this is the deviance on the training data.
+        If ``subsample == 1`` this is the loss on the training data.

     init_ : estimator
         The estimator that provides the initial predictions.
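The same attributes on the regressor can drive a simple OOB-based choice of the number of stages: the cumulative sum of ``oob_improvement_`` tracks how far the held-out loss has dropped, so its peak is a rough early-stopping point. A sketch on synthetic data (the threshold-free argmax rule is an illustrative choice, not a library recommendation):

```python
import numpy as np
from sklearn.datasets import make_regression
from sklearn.ensemble import GradientBoostingRegressor

X, y = make_regression(n_samples=500, noise=10.0, random_state=0)

reg = GradientBoostingRegressor(n_estimators=200, subsample=0.7, random_state=0)
reg.fit(X, y)

# Cumulative OOB improvement peaks at the stage with the lowest OOB loss.
cum_oob = np.cumsum(reg.oob_improvement_)
best_n = int(np.argmax(cum_oob)) + 1
print(f"OOB-suggested number of stages: {best_n} of {reg.n_estimators}")
```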