@@ -684,20 +684,6 @@ def _unnormalized_transform(self, X):
         doc_topic_distr : ndarray of shape (n_samples, n_components)
             Document topic distribution for X.
         """
-        check_is_fitted(self)
-
-        # make sure feature size is the same in fitted model and in X
-        X = self._check_non_neg_array(
-            X, reset_n_features=True, whom="LatentDirichletAllocation.transform"
-        )
-        n_samples, n_features = X.shape
-        if n_features != self.components_.shape[1]:
-            raise ValueError(
-                "The provided data has %d dimensions while "
-                "the model was trained with feature size %d."
-                % (n_features, self.components_.shape[1])
-            )
-
         doc_topic_distr, _ = self._e_step(X, cal_sstats=False, random_init=False)
 
         return doc_topic_distr
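For context on the checks removed from `_unnormalized_transform` in the hunk above, the sketch below reproduces them with public scikit-learn validation utilities. The standalone `validate_for_transform` helper, the toy count matrix, and the estimator settings are illustrative assumptions, not part of the library; the private `_check_non_neg_array` method is only approximated here by `check_non_negative`.

import numpy as np
from sklearn.decomposition import LatentDirichletAllocation
from sklearn.utils.validation import check_is_fitted, check_non_negative

def validate_for_transform(lda, X, whom="LatentDirichletAllocation.transform"):
    """Hypothetical helper mirroring the checks in the removed block."""
    check_is_fitted(lda)         # raises NotFittedError on an unfitted estimator
    check_non_negative(X, whom)  # LDA expects non-negative count data
    if X.shape[1] != lda.components_.shape[1]:
        raise ValueError(
            "The provided data has %d dimensions while "
            "the model was trained with feature size %d."
            % (X.shape[1], lda.components_.shape[1])
        )
    return X

rng = np.random.RandomState(0)
X = rng.randint(0, 5, size=(10, 8))
lda = LatentDirichletAllocation(n_components=3, random_state=0).fit(X)
doc_topic = lda.transform(validate_for_transform(lda, X))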
@@ -851,12 +837,6 @@ def _perplexity_precomp_distr(self, X, doc_topic_distr=None, sub_sampling=False)
         score : float
             Perplexity score.
         """
-        check_is_fitted(self)
-
-        X = self._check_non_neg_array(
-            X, reset_n_features=True, whom="LatentDirichletAllocation.perplexity"
-        )
-
         if doc_topic_distr is None:
             doc_topic_distr = self._unnormalized_transform(X)
         else:
@@ -902,4 +882,8 @@ def perplexity(self, X, sub_sampling=False):
         score : float
             Perplexity score.
         """
+        check_is_fitted(self)
+        X = self._check_non_neg_array(
+            X, reset_n_features=True, whom="LatentDirichletAllocation.perplexity"
+        )
         return self._perplexity_precomp_distr(X, sub_sampling=sub_sampling)
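A usage sketch of the layout in the last hunk, where the public `perplexity` method runs `check_is_fitted` and the input check itself before delegating to `_perplexity_precomp_distr`. The synthetic count matrix and parameter choices below are assumptions for illustration only.

import numpy as np
from sklearn.decomposition import LatentDirichletAllocation
from sklearn.exceptions import NotFittedError

rng = np.random.RandomState(0)
X = rng.randint(0, 5, size=(20, 10))

lda = LatentDirichletAllocation(n_components=4, random_state=0)
try:
    lda.perplexity(X)  # unfitted: the check_is_fitted guard raises first
except NotFittedError:
    pass

lda.fit(X)
print(lda.perplexity(X))  # X is validated in the public method, then delegated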