10000 DOC Ensures FeatureUnion passes numpydoc validation (#20936) · scikit-learn/scikit-learn@4ae5f83 · GitHub
[go: up one dir, main page]

Skip to content

Commit 4ae5f83

Browse files
baam25simo and glemaitre
authored and committed
DOC Ensures FeatureUnion passes numpydoc validation (#20936)
Co-authored-by: Guillaume Lemaitre <g.lemaitre58@gmail.com>
1 parent 51a7bd5 commit 4ae5f83

File tree

2 files changed

+29
-12
lines changed

2 files changed

+29
-12
lines changed

maint_tools/test_docstrings.py

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -10,7 +10,6 @@
1010
# List of modules ignored when checking for numpydoc validation.
1111
DOCSTRING_IGNORE_LIST = [
1212
"Birch",
13-
"FeatureUnion",
1413
"GammaRegressor",
1514
"GaussianProcessRegressor",
1615
"GaussianRandomProjection",

sklearn/pipeline.py

Lines changed: 29 additions & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -837,10 +837,12 @@ class FeatureUnion(TransformerMixin, _BaseComposition):
837837
838838
Parameters
839839
----------
840-
transformer_list : list of (string, transformer) tuples
841-
List of transformer objects to be applied to the data. The first
842-
half of each tuple is the name of the transformer. The tranformer can
843-
be 'drop' for it to be ignored.
840+
transformer_list : list of tuple
841+
List of tuple containing `(str, transformer)`. The first element
842+
of the tuple is name affected to the transformer while the
843+
second element is a scikit-learn transformer instance.
844+
The transformer instance can also be `"drop"` for it to be
845+
ignored.
844846
845847
.. versionchanged:: 0.22
846848
Deprecated `None` as a transformer in favor of 'drop'.
@@ -927,9 +929,17 @@ def set_params(self, **kwargs):
927929
you can directly set the parameters of the estimators contained in
928930
`tranformer_list`.
929931
932+
Parameters
933+
----------
934+
**kwargs : dict
935+
Parameters of this estimator or parameters of estimators contained
936+
in `transform_list`. Parameters of the transformers may be set
937+
using its name and the parameter name separated by a '__'.
938+
930939
Returns
931940
-------
932-
self
941+
self : object
942+
FeatureUnion class instance.
933943
"""
934944
self._set_params("transformer_list", **kwargs)
935945
return self
@@ -1005,10 +1015,13 @@ def fit(self, X, y=None, **fit_params):
10051015
y : array-like of shape (n_samples, n_outputs), default=None
10061016
Targets for supervised learning.
10071017
1018+
**fit_params : dict, default=None
1019+
Parameters to pass to the fit method of the estimator.
1020+
10081021
Returns
10091022
-------
1010-
self : FeatureUnion
1011-
This estimator
1023+
self : object
1024+
FeatureUnion class instance.
10121025
"""
10131026
transformers = self._parallel_func(X, y, fit_params, _fit_one)
10141027
if not transformers:
@@ -1029,12 +1042,15 @@ def fit_transform(self, X, y=None, **fit_params):
10291042
y : array-like of shape (n_samples, n_outputs), default=None
10301043
Targets for supervised learning.
10311044
1045+
**fit_params : dict, default=None
1046+
Parameters to pass to the fit method of the estimator.
1047+
10321048
Returns
10331049
-------
10341050
X_t : array-like or sparse matrix of \
10351051
shape (n_samples, sum_n_components)
1036-
hstack of results of transformers. sum_n_components is the
1037-
sum of n_components (output dimension) over transformers.
1052+
The `hstack` of results of transformers. `sum_n_components` is the
1053+
sum of `n_components` (output dimension) over transformers.
10381054
"""
10391055
results = self._parallel_func(X, y, fit_params, _fit_transform_one)
10401056
if not results:
@@ -1083,8 +1099,8 @@ def transform(self, X):
10831099
-------
10841100
X_t : array-like or sparse matrix of \
10851101
shape (n_samples, sum_n_components)
1086-
hstack of results of transformers. sum_n_components is the
1087-
sum of n_components (output dimension) over transformers.
1102+
The `hstack` of results of transformers. `sum_n_components` is the
1103+
sum of `n_components` (output dimension) over transformers.
10881104
"""
10891105
Xs = Parallel(n_jobs=self.n_jobs)(
10901106
delayed(_transform_one)(trans, X, None, weight)
@@ -1112,6 +1128,8 @@ def _update_transformer_list(self, transformers):
11121128

11131129
@property
11141130
def n_features_in_(self):
1131+
"""Number of features seen during :term:`fit`."""
1132+
11151133
# X is passed to all transformers so we just delegate to the first one
11161134
return self.transformer_list[0][1].n_features_in_
11171135

0 commit comments

Comments (0)
0