@@ -23,7 +23,6 @@
 
 import numpy as np
 from numpy.core.umath_tests import inner1d
-from warnings import warn
 
 from .base import BaseEnsemble
 from ..base import ClassifierMixin, RegressorMixin
@@ -51,8 +50,7 @@ def __init__(self,
                  base_estimator,
                  n_estimators=50,
                  estimator_params=tuple(),
-                 learning_rate=1.,
-                 compute_importances=False):
+                 learning_rate=1.):
 
         super(BaseWeightBoosting, self).__init__(
             base_estimator=base_estimator,
@@ -63,14 +61,6 @@ def __init__(self,
         self.estimator_errors_ = None
         self.learning_rate = learning_rate
 
-        if compute_importances:
-            warn("Setting compute_importances=True is no longer "
-                 "required. Variable importances are now computed on the fly "
-                 "when accessing the feature_importances_ attribute. This "
-                 "parameter will be removed in 0.15.", DeprecationWarning)
-
-        self.compute_importances = compute_importances
-
     def fit(self, X, y, sample_weight=None):
         """Build a boosted classifier/regressor from the training set (X, y).
 
@@ -326,14 +316,12 @@ def __init__(self,
                  base_estimator=DecisionTreeClassifier(max_depth=1),
                  n_estimators=50,
                  learning_rate=1.,
-                 algorithm='SAMME.R',
-                 compute_importances=False):
+                 algorithm='SAMME.R'):
 
         super(AdaBoostClassifier, self).__init__(
             base_estimator=base_estimator,
             n_estimators=n_estimators,
-            learning_rate=learning_rate,
-            compute_importances=compute_importances)
+            learning_rate=learning_rate)
 
         self.algorithm = algorithm
 
@@ -845,14 +833,12 @@ def __init__(self,
                  n_estimators=50,
                  learning_rate=1.,
                  loss='linear',
-                 compute_importances=False,
                  random_state=None):
 
         super(AdaBoostRegressor, self).__init__(
             base_estimator=base_estimator,
             n_estimators=n_estimators,
-            learning_rate=learning_rate,
-            compute_importances=compute_importances)
+            learning_rate=learning_rate)
 
         self.loss = loss
         self.random_state = random_state
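
Context for reviewers (not part of this diff): with the deprecated compute_importances flag gone, importances are computed lazily when the feature_importances_ attribute is read on a fitted ensemble, as the removed warning text describes. A minimal usage sketch, assuming a scikit-learn install that includes this change; the dataset and parameter values are illustrative only:

# Illustrative sketch: no constructor flag is needed; importances are derived
# from the fitted estimators when the attribute is accessed.
from sklearn.datasets import make_classification
from sklearn.ensemble import AdaBoostClassifier

X, y = make_classification(n_samples=200, n_features=10, random_state=0)
clf = AdaBoostClassifier(n_estimators=50, learning_rate=1.0)
clf.fit(X, y)
print(clf.feature_importances_)  # computed on the fly at attribute access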