8000 inheritance from _BasePCA · scikit-learn/scikit-learn@3125d33 · GitHub

Commit 3125d33

giorgiop committed
inheritance from _BasePCA
1 parent 13b489d commit 3125d33

File tree

1 file changed: +3 -74 lines changed

1 file changed

+3
-74
lines changed

sklearn/decomposition/pca.py

Lines changed: 3 additions & 74 deletions
```diff
@@ -17,11 +17,11 @@
 from scipy import sparse
 from scipy.special import gammaln
 
-from ..base import BaseEstimator, TransformerMixin
+from .base import _BasePCA
 from ..utils import deprecated
 from ..utils import check_random_state
 from ..utils import check_array
-from ..utils.extmath import fast_dot, fast_logdet, randomized_svd, svd_flip
+from ..utils.extmath import fast_logdet, randomized_svd, svd_flip
 from ..utils.validation import check_is_fitted
 
 
```

```diff
@@ -99,7 +99,7 @@ def _infer_dimension_(spectrum, n_samples, n_features):
     return ll.argmax()
 
 
-class PCA(BaseEstimator, TransformerMixin):
+class PCA(_BasePCA):
     """Principal component analysis (PCA)
 
     Linear dimensionality reduction using Singular Value Decomposition of the
```
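With this change, PCA stops defining get_covariance, transform, and inverse_transform itself and inherits them from _BasePCA in sklearn/decomposition/base.py. A minimal sketch of the unchanged public API, using random stand-in data and assuming a scikit-learn build that includes this commit:

```python
import numpy as np
from sklearn.decomposition import PCA

rng = np.random.RandomState(0)
X = rng.randn(100, 5)  # stand-in data: 100 samples, 5 features

pca = PCA(n_components=2).fit(X)

# These calls now resolve on the shared _BasePCA base class.
X_new = pca.transform(X)               # project onto the top 2 components
X_back = pca.inverse_transform(X_new)  # map back to the original feature space
cov = pca.get_covariance()             # generative-model covariance estimate
```

The public API is untouched; only the place the methods are defined changes.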
```diff
@@ -433,26 +433,6 @@ def _fit_truncated(self, X, svd_solver):
 
         return U, S, V
 
-    def get_covariance(self):
-        """Compute data covariance with the generative model.
-
-        ``cov = components_.T * S**2 * components_ + sigma2 * eye(n_features)``
-        where S**2 contains the explained variances.
-
-        Returns
-        -------
-        cov : array, shape=(n_features, n_features)
-            Estimated covariance of data.
-        """
-        components_ = self.components_
-        exp_var = self.explained_variance_
-        if self.whiten:
-            components_ = components_ * np.sqrt(exp_var[:, np.newaxis])
-        exp_var_diff = np.maximum(exp_var - self.noise_variance_, 0.)
-        cov = np.dot(components_.T * exp_var_diff, components_)
-        cov.flat[::len(cov) + 1] += self.noise_variance_  # modify diag inplace
-        return cov
-
     def get_precision(self):
         """Compute data precision matrix with the generative model.
 
```
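The deleted method is not lost: an identical implementation lives on _BasePCA and is now shared by its subclasses. For reference, a standalone NumPy sketch of the formula from the removed docstring, cov = components_.T * S**2 * components_ + sigma2 * eye(n_features); pca_covariance is a hypothetical helper name, and the whitening rescale from the removed body is omitted for brevity:

```python
import numpy as np

def pca_covariance(components, explained_variance, noise_variance):
    # Hypothetical helper mirroring the removed method: sandwich
    # diag(explained_variance - noise) between the component vectors,
    # clipping at zero so the covariance stays positive semi-definite.
    exp_var_diff = np.maximum(explained_variance - noise_variance, 0.)
    cov = np.dot(components.T * exp_var_diff, components)
    cov.flat[::len(cov) + 1] += noise_variance  # add sigma2 to the diagonal in place
    return cov
```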
```diff
@@ -484,57 +464,6 @@ def get_precision(self):
         precision.flat[::len(precision) + 1] += 1. / self.noise_variance_
         return precision
 
-    def transform(self, X):
-        """Apply the dimensionality reduction on X.
-
-        X is projected on the first principal components previous extracted
-        from a training set.
-
-        Parameters
-        ----------
-        X : array-like, shape (n_samples, n_features)
-            New data, where n_samples is the number of samples
-            and n_features is the number of features.
-
-        Returns
-        -------
-        X_new : array-like, shape (n_samples, n_components)
-
-        """
-        check_is_fitted(self, 'mean_')
-
-        X = check_array(X)
-        if self.mean_ is not None:
-            X = X - self.mean_
-        X_transformed = fast_dot(X, self.components_.T)
-        if self.whiten:
-            X_transformed /= np.sqrt(self.explained_variance_)
-        return X_transformed
-
-    def inverse_transform(self, X):
-        """Transform data back to its original space, i.e.,
-        return an input X_original whose transform would be X
-
-        Parameters
-        ----------
-        X : array-like, shape (n_samples, n_components)
-            New data, where n_samples is the number of samples
-            and n_components is the number of components.
-
-        Returns
-        -------
-        X_original array-like, shape (n_samples, n_features)
-        """
-        check_is_fitted(self, 'mean_')
-
-        if self.whiten:
-            return fast_dot(
-                X,
-                np.sqrt(self.explained_variance_[:, np.newaxis]) *
-                self.components_) + self.mean_
-        else:
-            return fast_dot(X, self.components_) + self.mean_
-
     def score_samples(self, X):
         """Return the log-likelihood of each sample
 
```
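With transform and inverse_transform now inherited from _BasePCA, the usual round-trip property still holds. A quick sketch, again assuming a build with this commit applied:

```python
import numpy as np
from sklearn.decomposition import PCA

rng = np.random.RandomState(0)
X = rng.randn(50, 4)

# With every component kept, inverse_transform undoes transform exactly
# (up to floating point), with or without whitening.
for whiten in (False, True):
    pca = PCA(n_components=4, whiten=whiten).fit(X)
    X_back = pca.inverse_transform(pca.transform(X))
    assert np.allclose(X, X_back)
```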
