diff --git a/sklearn/decomposition/fastica_.py b/sklearn/decomposition/fastica_.py index dd04e8e93a1c6..d841926cdfc87 100644 --- a/sklearn/decomposition/fastica_.py +++ b/sklearn/decomposition/fastica_.py @@ -436,11 +436,18 @@ def my_g(x): mixing_ : array, shape (n_features, n_components) The mixing matrix. + mean_ : array, shape (n_features,) + The mean over features. Only set if `self.whiten` is True. + n_iter_ : int If the algorithm is "deflation", n_iter is the maximum number of iterations run across all components. Else they are just the number of iterations taken to converge. + whitening_ : array, shape (n_components, n_features) + Only set if `whiten` is True. This is the pre-whitening matrix + that projects data onto the first `n_components` principal components. + Examples -------- >>> from sklearn.datasets import load_digits diff --git a/sklearn/decomposition/nmf.py b/sklearn/decomposition/nmf.py index ae624e2a1b4ab..f64bc34b7fad7 100644 --- a/sklearn/decomposition/nmf.py +++ b/sklearn/decomposition/nmf.py @@ -1192,6 +1192,11 @@ class NMF(BaseEstimator, TransformerMixin): components_ : array, [n_components, n_features] Factorization matrix, sometimes called 'dictionary'. + n_components_ : integer + The number of components. It is the same as the `n_components` parameter + if it was given. Otherwise, it will be the same as the number of + features. + reconstruction_err_ : number Frobenius norm of the matrix difference, or beta-divergence, between the training data ``X`` and the reconstructed data ``WH`` from diff --git a/sklearn/decomposition/pca.py b/sklearn/decomposition/pca.py index ccde667d0d20d..f8c28e1373b6e 100644 --- a/sklearn/decomposition/pca.py +++ b/sklearn/decomposition/pca.py @@ -237,6 +237,12 @@ class PCA(_BasePCA): n_components, or the lesser value of n_features and n_samples if n_components is None. + n_features_ : int + Number of features in the training data. + + n_samples_ : int + Number of samples in the training data.
+ noise_variance_ : float The estimated noise covariance following the Probabilistic PCA model from Tipping and Bishop 1999. See "Pattern Recognition and diff --git a/sklearn/random_projection.py b/sklearn/random_projection.py index f4fa2c608b842..8297a42ab17f8 100644 --- a/sklearn/random_projection.py +++ b/sklearn/random_projection.py @@ -453,7 +453,7 @@ class GaussianRandomProjection(BaseRandomProjection): Attributes ---------- - n_component_ : int + n_components_ : int Concrete number of components computed when n_components="auto". components_ : numpy array of shape [n_components, n_features] @@ -573,7 +573,7 @@ class SparseRandomProjection(BaseRandomProjection): Attributes ---------- - n_component_ : int + n_components_ : int Concrete number of components computed when n_components="auto". components_ : CSR matrix with shape [n_components, n_features]