@@ -191,10 +191,9 @@ def _validate_steps(self):
                 "Last step of Pipeline should implement fit. "
                 "'%s' (type %s) doesn't" % (estimator, type(estimator)))
 
-    def _iter(self, include_final_estimator=False):
+    def _iter(self, include_final_estimator=True):
         """
-        Generate (name, trans) tuples excluding None and
-        'passthrough' transformers
+        Generate (name, trans) tuples excluding 'passthrough' transformers
         """
         stop = len(self.steps)
         if not include_final_estimator:
@@ -235,7 +234,8 @@ def _fit(self, X, y=None, **fit_params):
             step, param = pname.split('__', 1)
             fit_params_steps[step][param] = pval
         Xt = X
-        for step_idx, (name, transformer) in enumerate(self._iter()):
+        for step_idx, (name, transformer) in enumerate(
+                self._iter(include_final_estimator=False)):
             if hasattr(memory, 'location'):
                 # joblib >= 0.12
                 if memory.location is None:
@@ -356,7 +356,7 @@ def predict(self, X, **predict_params):
         y_pred : array-like
         """
         Xt = X
-        for name, transform in self._iter():
+        for name, transform in self._iter(include_final_estimator=False):
             Xt = transform.transform(Xt)
         return self.steps[-1][-1].predict(Xt, **predict_params)
 
@@ -405,7 +405,7 @@ def predict_proba(self, X):
         y_proba : array-like, shape = [n_samples, n_classes]
         """
         Xt = X
-        for name, transform in self._iter():
+        for name, transform in self._iter(include_final_estimator=False):
             Xt = transform.transform(Xt)
         return self.steps[-1][-1].predict_proba(Xt)
 
@@ -424,7 +424,7 @@ def decision_function(self, X):
         y_score : array-like, shape = [n_samples, n_classes]
         """
         Xt = X
-        for name, transform in self._iter():
+        for name, transform in self._iter(include_final_estimator=False):
             Xt = transform.transform(Xt)
         return self.steps[-1][-1].decision_function(Xt)
 
@@ -443,7 +443,7 @@ def predict_log_proba(self, X):
         y_score : array-like, shape = [n_samples, n_classes]
         """
         Xt = X
-        for name, transform in self._iter():
+        for name, transform in self._iter(include_final_estimator=False):
             Xt = transform.transform(Xt)
         return self.steps[-1][-1].predict_log_proba(Xt)
 
@@ -472,7 +472,7 @@ def transform(self):
 
     def _transform(self, X):
         Xt = X
-        for _, transform in self._iter(include_final_estimator=True):
+        for _, transform in self._iter():
             Xt = transform.transform(Xt)
         return Xt
 
@@ -496,13 +496,13 @@ def inverse_transform(self):
         """
         # raise AttributeError if necessary for hasattr behaviour
         # XXX: Handling the None case means we can't use if_delegate_has_method
-        for _, transform in self._iter(include_final_estimator=True):
+        for _, transform in self._iter():
             transform.inverse_transform
         return self._inverse_transform
 
     def _inverse_transform(self, X):
         Xt = X
-        reverse_iter = reversed(list(self._iter(include_final_estimator=True)))
+        reverse_iter = reversed(list(self._iter()))
         for _, transform in reverse_iter:
             Xt = transform.inverse_transform(Xt)
         return Xt
@@ -530,7 +530,7 @@ def score(self, X, y=None, sample_weight=None):
         score : float
         """
         Xt = X
-        for name, transform in self._iter():
+        for name, transform in self._iter(include_final_estimator=False):
             Xt = transform.transform(Xt)
         score_params = {}
         if sample_weight is not None:
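
Taken together, these hunks flip the default of `_iter` so that the final estimator is yielded unless a caller explicitly opts out; `transform`/`inverse_transform` can then use the bare default, while `_fit`, `predict*`, `decision_function`, and `score` pass `include_final_estimator=False`. A minimal sketch of that pattern is shown below, assuming a simplified stand-in class; `TinyPipeline` and the string placeholders are illustrative only, not the scikit-learn implementation.

```python
# Minimal sketch of the new _iter default, not the scikit-learn class.
class TinyPipeline:
    def __init__(self, steps):
        # steps: list of (name, estimator) tuples, mirroring Pipeline.steps
        self.steps = steps

    def _iter(self, include_final_estimator=True):
        # New default: yield every step, final estimator included,
        # skipping 'passthrough' placeholders.
        stop = len(self.steps)
        if not include_final_estimator:
            stop -= 1
        for name, est in self.steps[:stop]:
            if est != 'passthrough':
                yield name, est


pipe = TinyPipeline([('scale', 'scaler'), ('select', 'passthrough'),
                     ('clf', 'classifier')])

# Transform-style paths use the new default and reach the last step ...
print([name for name, _ in pipe._iter()])  # ['scale', 'clf']
# ... while fit/predict/score-style paths now opt out of the final estimator.
print([name for name, _ in pipe._iter(include_final_estimator=False)])  # ['scale']
```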