coordinate_descent.py, least_angle.py fixed · scikit-learn/scikit-learn@610c21b · GitHub

Commit 610c21b

Author: giorgiop (committed)
coordinate_descent.py, least_angle.py fixed
1 parent 3e96599 commit 610c21b

File tree

7 files changed (+194, -187 lines)

sklearn/linear_model/base.py

+16 -25

@@ -68,7 +68,7 @@ def make_dataset(X, y, sample_weight, random_state=None):
 
 # TODO: this reproduces the behavior prior 0.17
 # Must be remove in 0.19
-def _sparse_center_data(X, y, fit_intercept, normalize=None):
+def _deprecated_sparse_center_data(X, y, fit_intercept, normalize=None):
     if fit_intercept:
         # we might require not to change the csr matrix sometimes
         # store a copy if normalize is True.
@@ -112,7 +112,7 @@ def sparse_center_data(X, y, fit_intercept, normalize=False,
                       "version 0.17 and will be removed in 0.19. If you want "
                       "to standardize the data instead, use"
                       "`standardize=True`", DeprecationWarning)
-        return _sparse_center_data(X, y, fit_intercept, normalize)
+        return _deprecated_sparse_center_data(X, y, fit_intercept, normalize)
 
     if fit_intercept:
         # we might require not to change the csr matrix sometimes
@@ -145,8 +145,8 @@ def sparse_center_data(X, y, fit_intercept, normalize=False,
 
 # TODO: this reproduces the behavior prior 0.17
 # Must be remove in 0.19
-def _center_data(X, y, fit_intercept, normalize=False, copy=True,
-                 sample_weight=None):
+def _deprecated_center_data(X, y, fit_intercept, normalize=False, copy=True,
+                            sample_weight=None):
 
     X = as_float_array(X, copy)
     if fit_intercept:
@@ -188,8 +188,8 @@ def center_data(X, y, fit_intercept, normalize=False, standardize=False,
                       "version 0.17 and will be removed in 0.19. If you want "
                       "to standardize the data instead, use"
                       "`standardize=True`", DeprecationWarning)
-        return _center_data(X, y, fit_intercept, normalize, copy,
-                            sample_weight)
+        return _deprecated_center_data(X, y, fit_intercept, normalize, copy,
+                                       sample_weight)
 
     X = as_float_array(X, copy)
     if fit_intercept:
@@ -462,24 +462,12 @@ class LinearRegression(LinearModel, RegressorMixin):
 
     def __init__(self, fit_intercept=True, normalize=False, standardize=False,
                  copy_X=True, n_jobs=1):
-        if normalize:
-            warnings.warn("The `normalize` parameter is not in use anymore "
-                          "from version 0.17 and will be removed in 0.19. If "
-                          "you want the data to be standardized instead, use "
-                          "`standardize=True`", DeprecationWarning)
         self.fit_intercept = fit_intercept
+        self.normalize = normalize
         self.standardize = standardize
         self.copy_X = copy_X
         self.n_jobs = n_jobs
 
-    @property
-    @deprecated("The `normalize` attribute is not in use anymore "
-                "from version 0.17 and will be removed in 0.19. If "
-                "you want the data to be standardized instead, use "
-                "`standardize=True`")
-    def normalize(self):
-        return None
-
     def fit(self, X, y, sample_weight=None):
         """
         Fit linear model.
@@ -509,7 +497,7 @@ def fit(self, X, y, sample_weight=None):
             sample_weight = column_or_1d(sample_weight, warn=True)
 
         X, y, X_mean, y_mean, X_std = self._center_data(
-            X, y, fit_intercept=self.fit_intercept,
+            X, y, fit_intercept=self.fit_intercept, normalize=self.normalize,
             standardize=self.standardize, copy=self.copy_X,
             sample_weight=sample_weight)
 
@@ -540,22 +528,25 @@ def fit(self, X, y, sample_weight=None):
         return self
 
 
-def _pre_fit(X, y, Xy, precompute, standardize, fit_intercept, copy):
+def _pre_fit(X, y, Xy, precompute, normalize, standardize, fit_intercept,
+             copy):
     """Aux function used at beginning of fit in linear models"""
     n_samples, n_features = X.shape
 
     if sparse.isspmatrix(X):
         precompute = False
         X, y, X_mean, y_mean, X_std = sparse_center_data(
-            X, y, fit_intercept=fit_intercept, standardize=standardize)
+            X, y, fit_intercept=fit_intercept, normalize=normalize,
+            standardize=standardize)
     else:
         # copy was done in fit if necessary
         X, y, X_mean, y_mean, X_std = center_data(
-            X, y, fit_intercept=fit_intercept, standardize=standardize,
-            copy=copy)
+            X, y, fit_intercept=fit_intercept, normalize=normalize,
+            standardize=standardize, copy=copy)
     if hasattr(precompute, '__array__') and (
             fit_intercept and not np.allclose(X_mean, np.zeros(n_features)) or
-            standardize and not np.allclose(X_std, np.ones(n_features))):
+            normalize or standardize or not
+            np.allclose(X_std, np.ones(n_features))):
         warnings.warn("Gram matrix was provided but X was centered"
                       " to fit intercept, "
                       "or X was standardized : recomputing Gram matrix.",

sklearn/linear_model/bayes.py

+11 -14

@@ -16,11 +16,11 @@
 from ..base import RegressorMixin
 from ..utils.extmath import fast_logdet, pinvh
 from ..utils import check_X_y
-from ..utils import deprecated
 
 ###############################################################################
 # BayesianRidge regression
 
+
 class BayesianRidge(LinearModel, RegressorMixin):
     """Bayesian ridge regression
 
@@ -97,7 +97,8 @@ class BayesianRidge(LinearModel, RegressorMixin):
     ... # doctest: +NORMALIZE_WHITESPACE
     BayesianRidge(alpha_1=1e-06, alpha_2=1e-06, compute_score=False,
             copy_X=True, fit_intercept=True, lambda_1=1e-06, lambda_2=1e-06,
-            n_iter=300, standardize=False, tol=0.001, verbose=False)
+            n_iter=300, normalize=False, standardize=False, tol=0.001,
+            verbose=False)
     >>> clf.predict([[1, 1]])
     array([ 1.])
 
@@ -124,18 +125,11 @@ def __init__(self, n_iter=300, tol=1.e-3, alpha_1=1.e-6, alpha_2=1.e-6,
         self.lambda_2 = lambda_2
         self.compute_score = compute_score
         self.fit_intercept = fit_intercept
+        self.normalize = normalize
         self.standardize = standardize
         self.copy_X = copy_X
         self.verbose = verbose
 
-    @property
-    @deprecated("The `normalize` attribute is not in use anymore "
-                "from version 0.17 and will be removed in 0.19. If "
-                "you want the data to be standardized instead, use "
-                "`standardize=True`")
-    def normalize(self):
-        return None
-
     def fit(self, X, y):
         """Fit the model
 
@@ -152,7 +146,8 @@ def fit(self, X, y):
         """
         X, y = check_X_y(X, y, dtype=np.float64, y_numeric=True)
         X, y, X_mean, y_mean, X_std = self._center_data(
-            X, y, self.fit_intercept, self.standardize, self.copy_X)
+            X, y, self.fit_intercept, normalize=self.normalize,
+            standardize=self.standardize, copy=self.copy_X)
         n_samples, n_features = X.shape
 
         # Initialization of the values of the parameters
@@ -317,8 +312,8 @@ class ARDRegression(LinearModel, RegressorMixin):
     ... # doctest: +NORMALIZE_WHITESPACE
     ARDRegression(alpha_1=1e-06, alpha_2=1e-06, compute_score=False,
             copy_X=True, fit_intercept=True, lambda_1=1e-06, lambda_2=1e-06,
-            n_iter=300, standardize=False, threshold_lambda=10000.0, tol=0.001,
-            verbose=False)
+            n_iter=300, normalize=False, standardize=False,
+            threshold_lambda=10000.0, tol=0.001, verbose=False)
     >>> clf.predict([[1, 1]])
     array([ 1.])
 
@@ -340,6 +335,7 @@ def __init__(self, n_iter=300, tol=1.e-3, alpha_1=1.e-6, alpha_2=1.e-6,
         self.n_iter = n_iter
         self.tol = tol
         self.fit_intercept = fit_intercept
+        self.normalize = normalize
         self.standardize = standardize
         self.alpha_1 = alpha_1
         self.alpha_2 = alpha_2
@@ -374,7 +370,8 @@ def fit(self, X, y):
         coef_ = np.zeros(n_features)
 
         X, y, X_mean, y_mean, X_std = self._center_data(
-            X, y, self.fit_intercept, self.standardize, self.copy_X)
+            X, y, self.fit_intercept, normalize=self.normalize,
+            standardize=self.standardize, copy=self.copy_X)
 
         # Launch the convergence loop
         keep_lambda = np.ones(n_features, dtype=bool)
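The bayes.py changes mirror base.py: `normalize` is stored again as a constructor attribute on `BayesianRidge` and `ARDRegression` and forwarded to `self._center_data` together with `standardize`. Below is a small usage sketch consistent with the doctest output restored above; it assumes a build of this branch, where the constructors accept both `normalize` and the branch-specific `standardize` flag.

import numpy as np
from sklearn.linear_model import BayesianRidge

# Assumes this branch's constructor signature, in which `normalize` is a real
# attribute again (not a deprecated property) and `standardize` also exists.
X = np.array([[0.0, 0.0], [1.0, 1.0], [2.0, 2.0]])
y = np.array([0.0, 1.0, 2.0])

clf = BayesianRidge(normalize=False, standardize=False)
clf.fit(X, y)
print(clf.predict([[1, 1]]))  # the doctest above expects array([ 1.])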
