Commit 6bd1c75 · seckcoder/scikit-learn
ENH: Remove compute_importances in AdaBoost
Parent: f229d38

1 file changed: +4 −18 lines

sklearn/ensemble/weight_boosting.py

@@ -23,7 +23,6 @@
 
 import numpy as np
 from numpy.core.umath_tests import inner1d
-from warnings import warn
 
 from .base import BaseEnsemble
 from ..base import ClassifierMixin, RegressorMixin
@@ -51,8 +50,7 @@ def __init__(self,
                  base_estimator,
                  n_estimators=50,
                  estimator_params=tuple(),
-                 learning_rate=1.,
-                 compute_importances=False):
+                 learning_rate=1.):
 
         super(BaseWeightBoosting, self).__init__(
             base_estimator=base_estimator,
@@ -63,14 +61,6 @@ def __init__(self,
         self.estimator_errors_ = None
         self.learning_rate = learning_rate
 
-        if compute_importances:
-            warn("Setting compute_importances=True is no longer "
-                 "required. Variable importances are now computed on the fly "
-                 "when accessing the feature_importances_ attribute. This "
-                 "parameter will be removed in 0.15.", DeprecationWarning)
-
-        self.compute_importances = compute_importances
-
     def fit(self, X, y, sample_weight=None):
         """Build a boosted classifier/regressor from the training set (X, y).
 
@@ -326,14 +316,12 @@ def __init__(self,
                  base_estimator=DecisionTreeClassifier(max_depth=1),
                  n_estimators=50,
                  learning_rate=1.,
-                 algorithm='SAMME.R',
-                 compute_importances=False):
+                 algorithm='SAMME.R'):
 
         super(AdaBoostClassifier, self).__init__(
             base_estimator=base_estimator,
             n_estimators=n_estimators,
-            learning_rate=learning_rate,
-            compute_importances=compute_importances)
+            learning_rate=learning_rate)
 
         self.algorithm = algorithm
 
@@ -845,14 +833,12 @@ def __init__(self,
                  n_estimators=50,
                  learning_rate=1.,
                  loss='linear',
-                 compute_importances=False,
                  random_state=None):
 
         super(AdaBoostRegressor, self).__init__(
             base_estimator=base_estimator,
             n_estimators=n_estimators,
-            learning_rate=learning_rate,
-            compute_importances=compute_importances)
+            learning_rate=learning_rate)
 
         self.loss = loss
         self.random_state = random_state
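
As the removed deprecation warning notes, variable importances are now computed on the fly when the feature_importances_ attribute is accessed, so no constructor flag is needed. A minimal sketch of the post-commit usage (the toy dataset and variable names are illustrative, not part of this commit):

    from sklearn.datasets import make_classification
    from sklearn.ensemble import AdaBoostClassifier

    # Illustrative toy data (not from the commit itself).
    X, y = make_classification(n_samples=200, n_features=10, random_state=0)

    # The constructor no longer accepts compute_importances; after this
    # commit the signature is (base_estimator, n_estimators,
    # learning_rate, algorithm).
    clf = AdaBoostClassifier(n_estimators=50, learning_rate=1.0,
                             algorithm='SAMME.R')
    clf.fit(X, y)

    # Importances are derived lazily from the fitted sub-estimators
    # when the attribute is accessed, as the removed warning described.
    print(clf.feature_importances_)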

0 commit comments
