@@ -1133,13 +1133,6 @@ class GradientBoostingClassifier(ClassifierMixin, BaseGradientBoosting):
     classification is a special case where only a single regression tree is
     induced.
 
-    See :ref:`sphx_glr_auto_examples_ensemble_plot_gradient_boosting_oob.py` for
-    an example on using Out-of-Bag estimates to estimate the optimal number of
-    iterations for Gradient Boosting.
-    See
-    :ref:`sphx_glr_auto_examples_ensemble_plot_gradient_boosting_regularization.py`
-    for an example on using regularization with Gradient Boosting.
-
     :class:`~sklearn.ensemble.HistGradientBoostingClassifier` is a much faster variant
     of this algorithm for intermediate and large datasets (`n_samples >= 10_000`) and
     supports monotonic constraints.
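The context lines kept by this hunk note that :class:`~sklearn.ensemble.HistGradientBoostingClassifier` is the faster, histogram-based alternative for larger datasets and is the estimator that accepts monotonic constraints. A minimal sketch of that contrast follows; the synthetic dataset, feature count, and constraint vector are illustrative assumptions, not part of the diff.

# Sketch only: compares the exact and histogram-based classifiers on assumed
# synthetic data; numbers and the constraint vector are illustrative.
from sklearn.datasets import make_classification
from sklearn.ensemble import GradientBoostingClassifier, HistGradientBoostingClassifier
from sklearn.model_selection import train_test_split

X, y = make_classification(n_samples=20_000, n_features=5, random_state=0)
X_train, X_test, y_train, y_test = train_test_split(X, y, random_state=0)

# Exact, tree-by-tree gradient boosting: fine for small data, slower here.
gb = GradientBoostingClassifier(n_estimators=100, random_state=0)

# Histogram-based variant: much faster on >= 10_000 samples and accepts
# monotonic constraints (+1 increasing, -1 decreasing, 0 unconstrained).
hgb = HistGradientBoostingClassifier(
    monotonic_cst=[1, 0, 0, 0, 0], random_state=0
)

for est in (gb, hgb):
    est.fit(X_train, y_train)
    print(type(est).__name__, est.score(X_test, y_test))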
@@ -1458,6 +1451,13 @@ class GradientBoostingClassifier(ClassifierMixin, BaseGradientBoosting):
     ... max_depth=1, random_state=0).fit(X_train, y_train)
     >>> clf.score(X_test, y_test)
     0.913...
+
+    See :ref:`sphx_glr_auto_examples_ensemble_plot_gradient_boosting_oob.py` for
+    an example on using Out-of-Bag estimates to estimate the optimal number of
+    iterations for Gradient Boosting. For a detailed example of utilizing
+    regularization with
+    :class:`~sklearn.ensemble.GradientBoostingClassifier`, please refer to
+    :ref:`sphx_glr_auto_examples_ensemble_plot_gradient_boosting_regularization.py`.
     """
 
     _parameter_constraints: dict = {
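The note added by this hunk points readers to the Out-of-Bag example. As a hedged sketch of the underlying idea (the dataset and hyperparameters below are assumptions): when `subsample < 1.0`, the fitted estimator exposes `oob_improvement_`, and its cumulative sum gives a cheap estimate of where to stop boosting.

# Sketch only: uses an assumed synthetic dataset and hyperparameters.
import numpy as np
from sklearn.datasets import make_classification
from sklearn.ensemble import GradientBoostingClassifier

X, y = make_classification(n_samples=2_000, random_state=0)

# subsample < 1.0 enables stochastic gradient boosting, which records the
# out-of-bag improvement in loss at each stage in clf.oob_improvement_.
clf = GradientBoostingClassifier(
    n_estimators=200, subsample=0.5, learning_rate=0.1, random_state=0
).fit(X, y)

# The argmax of the cumulative OOB improvement suggests a stopping iteration.
cum_oob = np.cumsum(clf.oob_improvement_)
best_n = int(np.argmax(cum_oob)) + 1
print("estimated optimal n_estimators:", best_n)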
@@ -1746,9 +1746,6 @@ class GradientBoostingRegressor(RegressorMixin, BaseGradientBoosting):
     each stage a regression tree is fit on the negative gradient of the given
     loss function.
 
-    See :ref:`sphx_glr_auto_examples_ensemble_plot_gradient_boosting_regularization.py`
-    for an example on using regularization with Gradient Boosting.
-
    :class:`~sklearn.ensemble.HistGradientBoostingRegressor` is a much faster variant
    of this algorithm for intermediate and large datasets (`n_samples >= 10_000`) and
    supports monotonic constraints.
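The regressor docstring kept in this hunk describes the stage-wise fit on the negative gradient of the chosen loss. A small illustrative sketch (the regression dataset and settings are assumptions) uses `staged_predict` to show the test error shrinking as stages are added.

# Sketch only: assumed synthetic regression data and settings.
from sklearn.datasets import make_regression
from sklearn.ensemble import GradientBoostingRegressor
from sklearn.metrics import mean_squared_error
from sklearn.model_selection import train_test_split

X, y = make_regression(n_samples=1_000, noise=10.0, random_state=0)
X_train, X_test, y_train, y_test = train_test_split(X, y, random_state=0)

# Each stage fits a regression tree to the negative gradient of the
# squared-error loss (i.e. the current residuals).
reg = GradientBoostingRegressor(
    loss="squared_error", n_estimators=50, max_depth=3, random_state=0
).fit(X_train, y_train)

# staged_predict yields predictions after each boosting stage, so the
# per-stage test error can be inspected directly.
for i, y_pred in enumerate(reg.staged_predict(X_test), start=1):
    if i % 10 == 0:
        print(f"stage {i:2d}: test MSE = {mean_squared_error(y_test, y_pred):.1f}")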