RFC: Change default to True · scikit-learn/scikit-learn@c83456b · GitHub

Commit c83456b

RFC: Change default to True
1 parent d3a9705 commit c83456b

1 file changed (+12 −12)
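The change itself is a one-word flip in the default of the private `Pipeline._iter` helper: unless a caller opts out, the generator now yields the final estimator along with the transformers. A minimal sketch of that contract, assuming a plain list of `(name, estimator)` pairs with string stand-ins rather than the real `Pipeline.steps` (the actual method, shown in the diff below, also skips `'passthrough'` placeholders):

    def _iter(steps, include_final_estimator=True):
        # Simplified sketch of the generator this commit touches: walk the
        # (name, estimator) pairs, skip 'passthrough' placeholders, and stop
        # before the last step only when the caller explicitly excludes it.
        stop = len(steps)
        if not include_final_estimator:
            stop -= 1
        for name, est in steps[:stop]:
            if est != 'passthrough':
                yield name, est

    steps = [('scale', 'scaler-like'), ('clf', 'classifier-like')]  # toy stand-ins
    list(_iter(steps))                                  # now yields both steps
    list(_iter(steps, include_final_estimator=False))   # transformers only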

sklearn/pipeline.py

Lines changed: 12 additions & 12 deletions
@@ -191,10 +191,9 @@ def _validate_steps(self):
                 "Last step of Pipeline should implement fit. "
                 "'%s' (type %s) doesn't" % (estimator, type(estimator)))
 
-    def _iter(self, include_final_estimator=False):
+    def _iter(self, include_final_estimator=True):
         """
-        Generate (name, trans) tuples excluding None and
-        'passthrough' transformers
+        Generate (name, trans) tuples excluding 'passthrough' transformers
         """
         stop = len(self.steps)
         if not include_final_estimator:
@@ -235,7 +234,8 @@ def _fit(self, X, y=None, **fit_params):
             step, param = pname.split('__', 1)
             fit_params_steps[step][param] = pval
         Xt = X
-        for step_idx, (name, transformer) in enumerate(self._iter()):
+        for step_idx, (name, transformer) in enumerate(
+                self._iter(include_final_estimator=False)):
             if hasattr(memory, 'location'):
                 # joblib >= 0.12
                 if memory.location is None:
@@ -356,7 +356,7 @@ def predict(self, X, **predict_params):
         y_pred : array-like
         """
         Xt = X
-        for name, transform in self._iter():
+        for name, transform in self._iter(include_final_estimator=False):
             Xt = transform.transform(Xt)
         return self.steps[-1][-1].predict(Xt, **predict_params)
 
@@ -405,7 +405,7 @@ def predict_proba(self, X):
         y_proba : array-like, shape = [n_samples, n_classes]
         """
         Xt = X
-        for name, transform in self._iter():
+        for name, transform in self._iter(include_final_estimator=False):
             Xt = transform.transform(Xt)
         return self.steps[-1][-1].predict_proba(Xt)
 
@@ -424,7 +424,7 @@ def decision_function(self, X):
         y_score : array-like, shape = [n_samples, n_classes]
         """
         Xt = X
-        for name, transform in self._iter():
+        for name, transform in self._iter(include_final_estimator=False):
             Xt = transform.transform(Xt)
         return self.steps[-1][-1].decision_function(Xt)
 
@@ -443,7 +443,7 @@ def predict_log_proba(self, X):
         y_score : array-like, shape = [n_samples, n_classes]
         """
         Xt = X
-        for name, transform in self._iter():
+        for name, transform in self._iter(include_final_estimator=False):
             Xt = transform.transform(Xt)
         return self.steps[-1][-1].predict_log_proba(Xt)
 
@@ -472,7 +472,7 @@ def transform(self):
 
     def _transform(self, X):
         Xt = X
-        for _, transform in self._iter(include_final_estimator=True):
+        for _, transform in self._iter():
             Xt = transform.transform(Xt)
         return Xt
 
@@ -496,13 +496,13 @@ def inverse_transform(self):
         """
         # raise AttributeError if necessary for hasattr behaviour
         # XXX: Handling the None case means we can't use if_delegate_has_method
-        for _, transform in self._iter(include_final_estimator=True):
+        for _, transform in self._iter():
             transform.inverse_transform
         return self._inverse_transform
 
     def _inverse_transform(self, X):
         Xt = X
-        reverse_iter = reversed(list(self._iter(include_final_estimator=True)))
+        reverse_iter = reversed(list(self._iter()))
         for _, transform in reverse_iter:
             Xt = transform.inverse_transform(Xt)
         return Xt
@@ -530,7 +530,7 @@ def score(self, X, y=None, sample_weight=None):
         score : float
         """
         Xt = X
-        for name, transform in self._iter():
+        for name, transform in self._iter(include_final_estimator=False):
             Xt = transform.transform(Xt)
         score_params = {}
         if sample_weight is not None:
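The caller-side pattern in the diff is consistent: methods that end by delegating to the final estimator (`_fit`, `predict`, `predict_proba`, `decision_function`, `predict_log_proba`, `score`) now request `include_final_estimator=False` explicitly, while `_transform` and `_inverse_transform` drop the argument and rely on the new default. A self-contained toy mirroring that split (a hypothetical `ToyPipeline`, not the scikit-learn class):

    class ToyPipeline:
        # Hypothetical miniature of the caller pattern after this commit.
        def __init__(self, steps):
            self.steps = steps  # list of (name, estimator) pairs

        def _iter(self, include_final_estimator=True):
            stop = len(self.steps)
            if not include_final_estimator:
                stop -= 1
            for name, est in self.steps[:stop]:
                yield name, est

        def predict(self, X):
            # Prediction paths opt out: transform through all but the last
            # step, then delegate to the final estimator.
            Xt = X
            for _, transform in self._iter(include_final_estimator=False):
                Xt = transform.transform(Xt)
            return self.steps[-1][-1].predict(Xt)

        def transform(self, X):
            # Transform paths rely on the new default and include every step.
            Xt = X
            for _, transform in self._iter():
                Xt = transform.transform(Xt)
            return Xt

With the old default (`False`), the transform and inverse-transform paths had to pass `include_final_estimator=True` at every call site; flipping the default moves the explicit argument onto the prediction and scoring paths instead.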
