@@ -924,7 +924,7 @@ class AdaBoostRegressor(RegressorMixin, BaseWeightBoosting):
         The maximum number of estimators at which boosting is terminated.
         In case of perfect fit, the learning procedure is stopped early.
 
-    learning_rate : float, default=1.
+    learning_rate : float, default=1.0
         Weight applied to each classifier at each boosting iteration. A higher
         learning rate increases the contribution of each classifier. There is
         a trade-off between the `learning_rate` and `n_estimators` parameters.
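Not part of the commit itself: a minimal sketch of the `learning_rate` / `n_estimators` trade-off described in the docstring above. The dataset and the specific `(learning_rate, n_estimators)` pairs are illustrative assumptions, not values from the source.

```python
from sklearn.datasets import make_regression
from sklearn.ensemble import AdaBoostRegressor
from sklearn.model_selection import cross_val_score

X, y = make_regression(n_features=4, n_informative=2, random_state=0)

# A smaller learning_rate shrinks each regressor's contribution, so more
# boosting rounds (n_estimators) are typically needed to reach a similar fit.
for lr, n_est in [(1.0, 50), (0.1, 500)]:
    regr = AdaBoostRegressor(n_estimators=n_est, learning_rate=lr, random_state=0)
    score = cross_val_score(regr, X, y, cv=5).mean()
    print(f"learning_rate={lr}, n_estimators={n_est}: mean CV R^2 = {score:.3f}")
```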
@@ -969,6 +969,19 @@ class AdaBoostRegressor(RegressorMixin, BaseWeightBoosting):
 
         .. versionadded:: 0.24
 
+    See Also
+    --------
+    AdaBoostClassifier : An AdaBoost classifier.
+    GradientBoostingRegressor : Gradient Boosting Regression Tree.
+    sklearn.tree.DecisionTreeRegressor : A decision tree regressor.
+
+    References
+    ----------
+    .. [1] Y. Freund, R. Schapire, "A Decision-Theoretic Generalization of
+           on-Line Learning and an Application to Boosting", 1995.
+
+    .. [2] H. Drucker, "Improving Regressors using Boosting Techniques", 1997.
+
     Examples
     --------
     >>> from sklearn.ensemble import AdaBoostRegressor
@@ -982,19 +995,6 @@ class AdaBoostRegressor(RegressorMixin, BaseWeightBoosting):
     array([4.7972...])
     >>> regr.score(X, y)
     0.9771...
-
-    See Also
-    --------
-    AdaBoostClassifier, GradientBoostingRegressor,
-    sklearn.tree.DecisionTreeRegressor
-
-    References
-    ----------
-    .. [1] Y. Freund, R. Schapire, "A Decision-Theoretic Generalization of
-           on-Line Learning and an Application to Boosting", 1995.
-
-    .. [2] H. Drucker, "Improving Regressors using Boosting Techniques", 1997.
-
     """
 
     def __init__(
@@ -1036,6 +1036,7 @@ def fit(self, X, y, sample_weight=None):
         Returns
         -------
         self : object
+            Fitted AdaBoostRegressor estimator.
         """
         # Check loss
         if self.loss not in ("linear", "square", "exponential"):
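A minimal usage sketch, not from the commit: it assumes the public scikit-learn API and illustrates both the `self` return value documented above (calls can be chained) and the loss validation shown in this hunk. The error-message comment is an expectation, not a verbatim quote of the library's text.

```python
from sklearn.datasets import make_regression
from sklearn.ensemble import AdaBoostRegressor

X, y = make_regression(n_features=4, random_state=0)

# fit returns the fitted estimator itself, so calls can be chained.
pred = AdaBoostRegressor(random_state=0).fit(X, y).predict(X[:2])
print(pred)

# The check in the hunk above rejects anything outside the three
# AdaBoost.R2 losses ("linear", "square", "exponential").
try:
    AdaBoostRegressor(loss="absolute").fit(X, y)
except ValueError as err:
    print(err)  # expected: a message naming the three valid loss values
```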