@@ -113,8 +113,6 @@ def _parallel_build_estimators(
     estimators = []
     estimators_features = []
 
-    request_or_router = get_routing_for_object(ensemble.estimator_)
-
     # TODO: (slep6) remove if condition for unrouted sample_weight when metadata
     # routing can't be disabled.
     support_sample_weight = has_fit_parameter(ensemble.estimator_, "sample_weight")
@@ -164,9 +162,14 @@ def _parallel_build_estimators(
         # Note: Row sampling can be achieved either through setting sample_weight or
         # by indexing. The former is more efficient. Therefore, use this method
         # if possible, otherwise use indexing.
-        if (
-            _routing_enabled() and request_or_router.consumes("fit", ("sample_weight",))
-        ) or (not _routing_enabled() and support_sample_weight):
+        if _routing_enabled():
+            request_or_router = get_routing_for_object(ensemble.estimator_)
+            consumes_sample_weight = request_or_router.consumes(
+                "fit", ("sample_weight",)
+            )
+        else:
+            consumes_sample_weight = support_sample_weight
+        if consumes_sample_weight:
             # Draw sub samples, using sample weights, and then fit
             curr_sample_weight = _check_sample_weight(
                 fit_params_.pop("sample_weight", None), X
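
Note on the hunk above: get_routing_for_object is no longer called unconditionally; the router is built only when _routing_enabled() is true, and a single consumes_sample_weight flag replaces the compound condition that mixed the routed and unrouted paths. A minimal sketch of the consumes check this relies on, assuming metadata routing is enabled via set_config (the estimator choice and the printed value are illustrative, not taken from this PR):

import sklearn
from sklearn.tree import DecisionTreeClassifier
from sklearn.utils.metadata_routing import get_routing_for_object

sklearn.set_config(enable_metadata_routing=True)

# A sub-estimator only "consumes" sample_weight for fit if it explicitly
# requests it; otherwise the bagging loop falls back to indexing.
est = DecisionTreeClassifier().set_fit_request(sample_weight=True)
request = get_routing_for_object(est)

# Expected to report sample_weight among the metadata consumed by fit.
print(request.consumes("fit", ("sample_weight",)))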
@@ -635,6 +638,9 @@ def get_metadata_routing(self):
     def _get_estimator(self):
         """Resolve which estimator to return."""
 
+    def _more_tags(self):
+        return {"allow_nan": _safe_tags(self._get_estimator(), "allow_nan")}
+
 
 class BaggingClassifier(ClassifierMixin, BaseBagging):
     """A Bagging classifier.
@@ -835,7 +841,9 @@ def __init__(
 
     def _get_estimator(self):
         """Resolve which estimator to return (default is DecisionTreeClassifier)"""
-        return self.estimator or DecisionTreeClassifier()
+        if self.estimator is None:
+            return DecisionTreeClassifier()
+        return self.estimator
 
     def _set_oob_score(self, X, y):
         n_samples = y.shape[0]
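
Note on the hunk above: the explicit None check replaces "self.estimator or DecisionTreeClassifier()" because the "or" form would also substitute the default for any estimator that merely evaluates as falsy (for example a container-like estimator whose __len__ returns 0), not only for None. A self-contained illustration of that pitfall (the class below is made up for the example):

class LengthZeroEstimator:
    """Stand-in for a container-like estimator whose __len__ returns 0."""

    def __len__(self):
        return 0


est = LengthZeroEstimator()

# The old pattern silently drops the supplied estimator because bool(est) is False:
print(est or "default estimator")                    # -> 'default estimator'

# The new pattern only falls back when the estimator is actually None:
print("default estimator" if est is None else est)   # -> the LengthZeroEstimator instance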
@@ -1059,14 +1067,6 @@ def decision_function(self, X):
 
         return decisions
 
-    def _more_tags(self):
-        if self.estimator is None:
-            estimator = DecisionTreeClassifier()
-        else:
-            estimator = self.estimator
-
-        return {"allow_nan": _safe_tags(estimator, "allow_nan")}
-
 
 class BaggingRegressor(RegressorMixin, BaseBagging):
     """A Bagging regressor.
@@ -1328,13 +1328,8 @@ def _set_oob_score(self, X, y):
         self.oob_prediction_ = predictions
         self.oob_score_ = r2_score(y, predictions)
 
-    def _more_tags(self):
-        if self.estimator is None:
-            estimator = DecisionTreeRegressor()
-        else:
-            estimator = self.estimator
-        return {"allow_nan": _safe_tags(estimator, "allow_nan")}
-
     def _get_estimator(self):
         """Resolve which estimator to return (default is DecisionTreeClassifier)"""
-        return self.estimator or DecisionTreeRegressor()
+        if self.estimator is None:
+            return DecisionTreeRegressor()
+        return self.estimator