DOC Adaboost numpytest docstrings validation (#15485) · rasbt/scikit-learn@452d919 · GitHub
[go: up one dir, main page]

Skip to content

Commit 452d919

Browse files
abbiepoparth
authored and committed
DOC Adaboost numpytest docstrings validation (scikit-learn#15485)
1 parent c4545aa commit 452d919

File tree

1 file changed

+36
-21
lines changed

1 file changed

+36
-21
lines changed

sklearn/ensemble/_weight_boosting.py

Lines changed: 36 additions & 21 deletions
Original file line numberDiff line numberDiff line change
@@ -223,7 +223,7 @@ def staged_score(self, X, y, sample_weight=None):
223223
sample_weight : array-like of shape (n_samples,), default=None
224224
Sample weights.
225225
226-
Returns
226+
Yields
227227
-------
228228
z : float
229229
"""
@@ -242,7 +242,8 @@ def feature_importances_(self):
242242
243243
Returns
244244
-------
245-
feature_importances_ : array, shape = [n_features]
245+
feature_importances_ : ndarray of shape (n_features,)
246+
The feature importances.
246247
"""
247248
if self.estimators_ is None or len(self.estimators_) == 0:
248249
raise ValueError("Estimator not fitted, "
@@ -302,9 +303,9 @@ class AdaBoostClassifier(ClassifierMixin, BaseWeightBoosting):
302303
The base estimator from which the boosted ensemble is built.
303304
Support for sample weighting is required, as well as proper
304305
``classes_`` and ``n_classes_`` attributes. If ``None``, then
305-
the base estimator is ``DecisionTreeClassifier(max_depth=1)``
306+
the base estimator is ``DecisionTreeClassifier(max_depth=1)``.
306307
307-
n_estimators : integer, optional (default=50)
308+
n_estimators : int, optional (default=50)
308309
The maximum number of estimators at which boosting is terminated.
309310
In case of perfect fit, the learning procedure is stopped early.
310311
@@ -350,6 +351,32 @@ class AdaBoostClassifier(ClassifierMixin, BaseWeightBoosting):
350351
feature_importances_ : ndarray of shape (n_features,)
351352
The feature importances if supported by the ``base_estimator``.
352353
354+
See Also
355+
--------
356+
AdaBoostRegressor
357+
An AdaBoost regressor that begins by fitting a regressor on the
358+
original dataset and then fits additional copies of the regressor
359+
on the same dataset but where the weights of instances are
360+
adjusted according to the error of the current prediction.
361+
362+
GradientBoostingClassifier
363+
GB builds an additive model in a forward stage-wise fashion. Regression
364+
trees are fit on the negative gradient of the binomial or multinomial
365+
deviance loss function. Binary classification is a special case where
366+
only a single regression tree is induced.
367+
368+
sklearn.tree.DecisionTreeClassifier
369+
A non-parametric supervised learning method used for classification.
370+
Creates a model that predicts the value of a target variable by
371+
learning simple decision rules inferred from the data features.
372+
373+
References
374+
----------
375+
.. [1] Y. Freund, R. Schapire, "A Decision-Theoretic Generalization of
376+
on-Line Learning and an Application to Boosting", 1995.
377+
378+
.. [2] J. Zhu, H. Zou, S. Rosset, T. Hastie, "Multi-class AdaBoost", 2009.
379+
353380
Examples
354381
--------
355382
>>> from sklearn.ensemble import AdaBoostClassifier
@@ -366,19 +393,6 @@ class AdaBoostClassifier(ClassifierMixin, BaseWeightBoosting):
366393
array([1])
367394
>>> clf.score(X, y)
368395
0.983...
369-
370-
See also
371-
--------
372-
AdaBoostRegressor, GradientBoostingClassifier,
373-
sklearn.tree.DecisionTreeClassifier
374-
375-
References
376-
----------
377-
.. [1] Y. Freund, R. Schapire, "A Decision-Theoretic Generalization of
378-
on-Line Learning and an Application to Boosting", 1995.
379-
380-
.. [2] J. Zhu, H. Zou, S. Rosset, T. Hastie, "Multi-class AdaBoost", 2009.
381-
382396
"""
383397
def __init__(self,
384398
base_estimator=None,
@@ -414,6 +428,7 @@ def fit(self, X, y, sample_weight=None):
414428
Returns
415429
-------
416430
self : object
431+
A fitted estimator.
417432
"""
418433
# Check that algorithm is supported
419434
if self.algorithm not in ('SAMME', 'SAMME.R'):
@@ -632,7 +647,7 @@ def staged_predict(self, X):
632647
The input samples. Sparse matrix can be CSC, CSR, COO,
633648
DOK, or LIL. COO, DOK, and LIL are converted to CSR.
634649
635-
Returns
650+
Yields
636651
-------
637652
y : generator of array, shape = [n_samples]
638653
The predicted classes.
@@ -703,7 +718,7 @@ def staged_decision_function(self, X):
703718
The training input samples. Sparse matrix can be CSC, CSR, COO,
704719
DOK, or LIL. COO, DOK, and LIL are converted to CSR.
705720
706-
Returns
721+
Yields
707722
-------
708723
score : generator of array, shape = [n_samples, k]
709724
The decision function of the input samples. The order of
@@ -811,7 +826,7 @@ def staged_predict_proba(self, X):
811826
The training input samples. Sparse matrix can be CSC, CSR, COO,
812827
DOK, or LIL. COO, DOK, and LIL are converted to CSR.
813828
814-
Returns
829+
Yields
815830
-------
816831
p : generator of array, shape = [n_samples]
817832
The class probabilities of the input samples. The order of
@@ -1130,7 +1145,7 @@ def staged_predict(self, X):
11301145
X : {array-like, sparse matrix} of shape (n_samples, n_features)
11311146
The training input samples.
11321147
1133-
Returns
1148+
Yields
11341149
-------
11351150
y : generator of array, shape = [n_samples]
11361151
The predicted regression values.

0 commit comments

Comments (0)