@@ -214,9 +214,16 @@ def _fit_transform_one(transformer, name, X, y, transformer_weights,
                       **fit_params):
    if transformer_weights is not None and name in transformer_weights:
        # if we have a weight for this transformer, multiply output
-        return (transformer.fit_transform(X, y, **fit_params)
-                * transformer_weights[name])
-    return transformer.fit_transform(X, y, **fit_params)
+        if hasattr(transformer, 'fit_transform'):
+            return (transformer.fit_transform(X, y, **fit_params)
+                    * transformer_weights[name])
+        else:
+            return (transformer.fit(X, y, **fit_params).transform(X)
+                    * transformer_weights[name])
+    if hasattr(transformer, 'fit_transform'):
+        return transformer.fit_transform(X, y, **fit_params)
+    else:
+        return transformer.fit(X, y, **fit_params).transform(X)


class FeatureUnion(BaseEstimator, TransformerMixin):
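
The new branches above let the helper fall back to fit(X, y).transform(X) whenever a transformer does not provide fit_transform. As a rough standalone sketch of that dispatch, outside the diff itself (the helper name fit_transform_or_fallback and the FitThenTransformOnly class are hypothetical, not part of this commit; StandardScaler is the existing public API):

import numpy as np
from sklearn.preprocessing import StandardScaler


class FitThenTransformOnly(object):
    """Hypothetical transformer that defines fit/transform but no fit_transform."""

    def fit(self, X, y=None):
        self.mean_ = np.asarray(X).mean(axis=0)
        return self

    def transform(self, X):
        return np.asarray(X) - self.mean_


def fit_transform_or_fallback(transformer, X, y=None, **fit_params):
    # Mirrors the unweighted branch of the patched helper: prefer
    # fit_transform when available, otherwise fit(...).transform(...).
    if hasattr(transformer, 'fit_transform'):
        return transformer.fit_transform(X, y, **fit_params)
    return transformer.fit(X, y, **fit_params).transform(X)


X = [[0., 1.], [2., 3.], [4., 5.]]
print(fit_transform_or_fallback(StandardScaler(), X))        # fit_transform path
print(fit_transform_or_fallback(FitThenTransformOnly(), X))  # fallback path
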
@@ -275,7 +282,7 @@ def fit(self, X, y=None):

    def fit_transform(self, X, y=None, **fit_params):
        """Fit all transformers using X, transform the data and concatenate
-        results. Valid only if all transformers implement fit_transform.
+        results.

        Parameters
        ----------
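
With the fallback in place, FeatureUnion.fit_transform no longer requires every step to implement fit_transform, which is why the docstring restriction above is dropped. A hedged usage sketch (the MeanCenterer class is hypothetical; FeatureUnion, PCA and transformer_weights are the existing public API):

import numpy as np
from sklearn.base import BaseEstimator
from sklearn.decomposition import PCA
from sklearn.pipeline import FeatureUnion


class MeanCenterer(BaseEstimator):
    """Hypothetical transformer providing only fit and transform."""

    def fit(self, X, y=None):
        self.mean_ = np.asarray(X).mean(axis=0)
        return self

    def transform(self, X):
        return np.asarray(X) - self.mean_


X = np.array([[0., 1., 2.], [2., 3., 4.], [4., 5., 7.]])
union = FeatureUnion([('pca', PCA(n_components=1)),
                      ('center', MeanCenterer())],
                     transformer_weights={'center': 0.5})
# 1 PCA component + 3 centered (and down-weighted) columns -> shape (3, 4)
print(union.fit_transform(X).shape)
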