DOC Ensures that PowerTransformer passes numpydoc validation (#21015) · scikit-learn/scikit-learn@b9b7de4 · GitHub

Commit b9b7de4

jmloyola authored and glemaitre committed
DOC Ensures that PowerTransformer passes numpydoc validation (#21015)
Co-authored-by: Guillaume Lemaitre <g.lemaitre58@gmail.com>
1 parent 09722d8 commit b9b7de4

File tree

2 files changed (+31, -16 lines)


maint_tools/test_docstrings.py

Lines changed: 0 additions & 1 deletion
@@ -42,7 +42,6 @@
     "PassiveAggressiveRegressor",
     "PatchExtractor",
     "PolynomialFeatures",
-    "PowerTransformer",
     "QuadraticDiscriminantAnalysis",
     "QuantileRegressor",
     "QuantileTransformer",

sklearn/preprocessing/_data.py

Lines changed: 31 additions & 15 deletions
@@ -2974,21 +2974,6 @@ class PowerTransformer(_OneToOneFeatureMixin, TransformerMixin, BaseEstimator):
 
         .. versionadded:: 1.0
 
-    Examples
-    --------
-    >>> import numpy as np
-    >>> from sklearn.preprocessing import PowerTransformer
-    >>> pt = PowerTransformer()
-    >>> data = [[1, 2], [3, 2], [4, 5]]
-    >>> print(pt.fit(data))
-    PowerTransformer()
-    >>> print(pt.lambdas_)
-    [ 1.386... -3.100...]
-    >>> print(pt.transform(data))
-    [[-1.316... -0.707...]
-     [ 0.209... -0.707...]
-     [ 1.106... 1.414...]]
-
     See Also
     --------
     power_transform : Equivalent function without the estimator API.
@@ -3014,6 +2999,21 @@ class PowerTransformer(_OneToOneFeatureMixin, TransformerMixin, BaseEstimator):
 
     .. [2] G.E.P. Box and D.R. Cox, "An Analysis of Transformations", Journal
            of the Royal Statistical Society B, 26, 211-252 (1964).
+
+    Examples
+    --------
+    >>> import numpy as np
+    >>> from sklearn.preprocessing import PowerTransformer
+    >>> pt = PowerTransformer()
+    >>> data = [[1, 2], [3, 2], [4, 5]]
+    >>> print(pt.fit(data))
+    PowerTransformer()
+    >>> print(pt.lambdas_)
+    [ 1.386... -3.100...]
+    >>> print(pt.transform(data))
+    [[-1.316... -0.707...]
+     [ 0.209... -0.707...]
+     [ 1.106... 1.414...]]
     """
 
     def __init__(self, method="yeo-johnson", *, standardize=True, copy=True):
@@ -3044,6 +3044,22 @@ def fit(self, X, y=None):
         return self
 
     def fit_transform(self, X, y=None):
+        """Fit `PowerTransformer` to `X`, then transform `X`.
+
+        Parameters
+        ----------
+        X : array-like of shape (n_samples, n_features)
+            The data used to estimate the optimal transformation parameters
+            and to be transformed using a power transformation.
+
+        y : Ignored
+            Not used, present for API consistency by convention.
+
+        Returns
+        -------
+        X_new : ndarray of shape (n_samples, n_features)
+            Transformed data.
+        """
         return self._fit(X, y, force_transform=True)
 
     def _fit(self, X, y=None, force_transform=False):
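For context, a short usage sketch (not part of the commit) of the fit_transform method documented in the hunk above; the printed values mirror the truncated doctest output from the class docstring and are only illustrative:

from sklearn.preprocessing import PowerTransformer

data = [[1, 2], [3, 2], [4, 5]]
pt = PowerTransformer()

# fit_transform estimates the optimal lambdas_ and applies the power
# transformation in one call, equivalent to pt.fit(data).transform(data).
X_new = pt.fit_transform(data)

print(pt.lambdas_)  # approximately [ 1.386 -3.100], as in the doctest
print(X_new)        # approximately [[-1.316 -0.707]
                    #                [ 0.209 -0.707]
                    #                [ 1.106  1.414]]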

0 commit comments
