10000 remove references to old GP, GMM and sparse_center_data · scikit-learn/scikit-learn@6558ae8 · GitHub
[go: up one dir, main page]

Skip to content

Commit 6558ae8

Browse files
amueller authored and Joan Massich committed
remove references to old GP, GMM and sparse_center_data
1 parent a1bc2ed commit 6558ae8

File tree

2 files changed

+1
-87
lines changed

2 files changed

+1
-87
lines changed

sklearn/linear_model/tests/test_base.py

Lines changed: 0 additions & 74 deletions
Original file line numberDiff line numberDiff line change
@@ -6,17 +6,14 @@
66
import numpy as np
77
from scipy import sparse
88
from scipy import linalg
9-
from itertools import product
109

1110

1211
from sklearn.utils.testing import assert_array_almost_equal
1312
from sklearn.utils.testing import assert_almost_equal
1413
from sklearn.utils.testing import assert_equal
15-
from sklearn.utils.testing import ignore_warnings
1614

1715
from sklearn.linear_model.base import LinearRegression
1816
from sklearn.linear_model.base import _preprocess_data
19-
from sklearn.linear_model.base import sparse_center_data, center_data
2017
from sklearn.linear_model.base import _rescale_data
2118
from sklearn.utils import check_random_state
2219
from sklearn.utils.testing import assert_greater
@@ -402,74 +399,3 @@ def test_rescale_data():
402399
rescaled_y2 = y * np.sqrt(sample_weight)
403400
assert_array_almost_equal(rescaled_X, rescaled_X2)
404401
assert_array_almost_equal(rescaled_y, rescaled_y2)
405-
406-
407-
@ignore_warnings # all deprecation warnings
408-
def test_deprecation_center_data():
409-
n_samples = 200
410-
n_features = 2
411-
412-
w = 1.0 + rng.rand(n_samples)
413-
X = rng.rand(n_samples, n_features)
414-
y = rng.rand(n_samples)
415-
416-
param_grid = product([True, False], [True, False], [True, False],
417-
[None, w])
418-
419-
for (fit_intercept, normalize, copy, sample_weight) in param_grid:
420-
421-
XX = X.copy() # such that we can try copy=False as well
422-
423-
X1, y1, X1_mean, X1_var, y1_mean = \
424-
center_data(XX, y, fit_intercept=fit_intercept,
425-
normalize=normalize, copy=copy,
426-
sample_weight=sample_weight)
427-
428-
XX = X.copy()
429-
430-
X2, y2, X2_mean, X2_var, y2_mean = \
431-
_preprocess_data(XX, y, fit_intercept=fit_intercept,
432-
normalize=normalize, copy=copy,
433-
sample_weight=sample_weight)
434-
435-
assert_array_almost_equal(X1, X2)
436-
assert_array_almost_equal(y1, y2)
437-
assert_array_almost_equal(X1_mean, X2_mean)
438-
assert_array_almost_equal(X1_var, X2_var)
439-
assert_array_almost_equal(y1_mean, y2_mean)
440-
441-
# Sparse cases
442-
X = sparse.csr_matrix(X)
443-
444-
for (fit_intercept, normalize, copy, sample_weight) in param_grid:
445-
446-
X1, y1, X1_mean, X1_var, y1_mean = \
447-
center_data(X, y, fit_intercept=fit_intercept, normalize=normalize,
448-
copy=copy, sample_weight=sample_weight)
449-
450-
X2, y2, X2_mean, X2_var, y2_mean = \
451-
_preprocess_data(X, y, fit_intercept=fit_intercept,
452-
normalize=normalize, copy=copy,
453-
sample_weight=sample_weight, return_mean=False)
454-
455-
assert_array_almost_equal(X1.toarray(), X2.toarray())
456-
assert_array_almost_equal(y1, y2)
457-
assert_array_almost_equal(X1_mean, X2_mean)
458-
assert_array_almost_equal(X1_var, X2_var)
459-
assert_array_almost_equal(y1_mean, y2_mean)
460-
461-
for (fit_intercept, normalize) in product([True, False], [True, False]):
462-
463-
X1, y1, X1_mean, X1_var, y1_mean = \
464-
sparse_center_data(X, y, fit_intercept=fit_intercept,
465-
normalize=normalize)
466-
467-
X2, y2, X2_mean, X2_var, y2_mean = \
468-
_preprocess_data(X, y, fit_intercept=fit_intercept,
469-
normalize=normalize, return_mean=True)
470-
471-
assert_array_almost_equal(X1.toarray(), X2.toarray())
472-
assert_array_almost_equal(y1, y2)
473-
assert_array_almost_equal(X1_mean, X2_mean)
474-
assert_array_almost_equal(X1_var, X2_var)
475-
assert_array_almost_equal(y1_mean, y2_mean)

sklearn/mixture/__init__.py

Lines changed: 1 addition & 13 deletions
Original file line numberDiff line numberDiff line change
@@ -2,21 +2,9 @@
22
The :mod:`sklearn.mixture` module implements mixture modeling algorithms.
33
"""
44

5-
from .gmm import sample_gaussian, log_multivariate_normal_density
6-
from .gmm import GMM, distribute_covar_matrix_to_match_covariance_type
7-
from .gmm import _validate_covars
8-
from .dpgmm import DPGMM, VBGMM
9-
105
from .gaussian_mixture import GaussianMixture
116
from .bayesian_mixture import BayesianGaussianMixture
127

138

14-
__all__ = ['DPGMM',
15-
'GMM',
16-
'VBGMM',
17-
'_validate_covars',
18-
'distribute_covar_matrix_to_match_covariance_type',
19-
'log_multivariate_normal_density',
20-
'sample_gaussian',
21-
'GaussianMixture',
9+
__all__ = ['GaussianMixture',
2210
'BayesianGaussianMixture']

0 commit comments

Comments (0)