COSMIT rm deprecated stuff -- lots of it · scikit-learn/scikit-learn@77e69ed
Commit 77e69ed

COSMIT rm deprecated stuff -- lots of it

1 parent 46f4c55 commit 77e69ed

12 files changed: +32 −174 lines changed

doc/whats_new.rst

Lines changed: 15 additions & 0 deletions
@@ -16,6 +16,21 @@ Changelog

   - Speed up of :func:`metrics.precision_recall_curve` by Conrad Lee.

+API changes summary
+-------------------
+
+  - The module ``sklearn.linear_model.sparse`` is gone. Sparse matrix support
+    was already integrated into the "regular" linear models.
+
+  - ``sklearn.metrics.mean_square_error``, which incorrectly returned the
+    cumulated error, was removed. Use ``mean_squared_error`` instead.
+
+  - Passing ``class_weight`` parameters to ``fit`` methods is no longer
+    supported. Pass them to estimator constructors instead.
+
+  - GMMs no longer have ``decode`` and ``rvs`` methods. Use the ``score``,
+    ``predict`` or ``sample`` methods instead.

 .. _changes_0_12:
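For the ``class_weight`` bullet above, a minimal migration sketch (not part of the commit; data and parameter values illustrative, assuming the 0.12-era estimator API):

    import numpy as np
    from sklearn.svm import SVC

    X = np.array([[0.0], [1.0], [2.0], [3.0]])
    y = np.array([0, 0, 1, 1])

    # Before: clf.fit(X, y, class_weight={0: 1.0, 1: 2.0})  -- now unsupported.
    # After: pass class_weight to the constructor instead.
    clf = SVC(kernel='linear', class_weight={0: 1.0, 1: 2.0})
    clf.fit(X, y)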

sklearn/grid_search.py

Lines changed: 6 additions & 17 deletions
@@ -329,10 +329,10 @@ def __init__(self, estimator, param_grid, loss_func=None, score_func=None,
         self.pre_dispatch = pre_dispatch

     def _set_methods(self):
-        if hasattr(self._best_estimator_, 'predict'):
-            self.predict = self._best_estimator_.predict
-        if hasattr(self._best_estimator_, 'predict_proba'):
-            self.predict_proba = self._best_estimator_.predict_proba
+        if hasattr(self.best_estimator_, 'predict'):
+            self.predict = self.best_estimator_.predict
+        if hasattr(self.best_estimator_, 'predict_proba'):
+            self.predict_proba = self.best_estimator_.predict_proba

     def fit(self, X, y=None, **params):
         """Run fit with all sets of parameters
@@ -379,7 +379,7 @@ def _fit(self, X, y):
         params = next(iter(grid))
         base_clf.set_params(**params)
         base_clf.fit(X, y)
-        self._best_estimator_ = base_clf
+        self.best_estimator_ = base_clf
         self._set_methods()
         return self

@@ -434,7 +434,7 @@ def _fit(self, X, y):
         # clone first to work around broken estimators
         best_estimator = clone(base_clf).set_params(**best_params)
         best_estimator.fit(X, y, **self.fit_params)
-        self._best_estimator_ = best_estimator
+        self.best_estimator_ = best_estimator
         self._set_methods()

         # Store the computed scores
@@ -455,14 +455,3 @@ def score(self, X, y=None):
                             % self.best_estimator_)
         y_predicted = self.predict(X)
         return self.score_func(y, y_predicted)
-
-    # TODO around 0.13: remove this property, make it an attribute
-    @property
-    def best_estimator_(self):
-        if hasattr(self, '_best_estimator_'):
-            return self._best_estimator_
-        else:
-            raise RuntimeError("Grid search has to be run with 'refit=True'"
-                               " to make predictions or obtain an instance of the best "
-                               " estimator. To obtain the best parameter settings, "
-                               " use ``best_params_``.")

sklearn/linear_model/__init__.py

Lines changed: 1 addition & 3 deletions
@@ -26,7 +26,6 @@
 from .perceptron import Perceptron
 from .randomized_l1 import RandomizedLasso, RandomizedLogisticRegression, \
     lasso_stability_path
-from . import sparse
 from .isotonic_regression_ import IsotonicRegression

 __all__ = ['ARDRegression',
@@ -65,5 +64,4 @@
            'lasso_stability_path',
            'orthogonal_mp',
            'orthogonal_mp_gram',
-           'ridge_regression',
-           'sparse']
+           'ridge_regression']

sklearn/linear_model/sparse/__init__.py

Lines changed: 0 additions & 8 deletions
This file was deleted.

sklearn/linear_model/sparse/coordinate_descent.py

Lines changed: 0 additions & 15 deletions
This file was deleted.

sklearn/linear_model/sparse/logistic.py

Lines changed: 0 additions & 8 deletions
This file was deleted.

sklearn/linear_model/sparse/stochastic_gradient.py

Lines changed: 0 additions & 15 deletions
This file was deleted.
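With the ``sparse`` submodule deleted, sparse input goes straight to the regular estimators. A minimal sketch (not part of the commit; the choice of ``ElasticNet`` and the toy data are illustrative):

    import numpy as np
    import scipy.sparse as sp
    from sklearn.linear_model import ElasticNet  # was sklearn.linear_model.sparse.ElasticNet

    X = sp.csr_matrix(np.array([[0.0, 1.0], [1.0, 0.0], [1.0, 1.0]]))
    y = np.array([0.0, 1.0, 2.0])
    ElasticNet().fit(X, y)  # sparse X is handled by the regular estimator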

sklearn/linear_model/stochastic_gradient.py

Lines changed: 1 addition & 8 deletions
@@ -450,8 +450,7 @@ def _partial_fit(self, X, y, n_iter, classes=None, sample_weight=None,

         return self

-    def partial_fit(self, X, y, classes=None,
-                    class_weight=None, sample_weight=None):
+    def partial_fit(self, X, y, classes=None, sample_weight=None):
         """Fit linear model with Stochastic Gradient Descent.

         Parameters
@@ -478,12 +477,6 @@ def partial_fit(self, X, y, classes=None,
         -------
         self : returns an instance of self.
         """
-        if class_weight is not None:
-            warnings.warn("Using 'class_weight' as a parameter to the 'fit'"
-                          "method is deprecated and will be removed in 0.13. "
-                          "Set it on initialization instead.",
-                          DeprecationWarning, stacklevel=2)
-            self.class_weight = class_weight
         return self._partial_fit(X, y, n_iter=1, classes=classes,
                                  sample_weight=sample_weight)
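A sketch of the new calling convention (not part of the commit; data illustrative): ``class_weight`` belongs to the constructor, and ``partial_fit`` takes only ``classes`` and ``sample_weight``:

    import numpy as np
    from sklearn.linear_model import SGDClassifier

    X = np.array([[0.0], [1.0], [2.0], [3.0]])
    y = np.array([0, 0, 1, 1])
    clf = SGDClassifier(class_weight={0: 1.0, 1: 2.0})
    clf.partial_fit(X, y, classes=np.array([0, 1]))  # classes required on the first call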

sklearn/metrics/__init__.py

Lines changed: 6 additions & 6 deletions
@@ -3,12 +3,12 @@
 and pairwise metrics and distance computations.
 """

-from .metrics import confusion_matrix, roc_curve, auc, precision_score, \
-    recall_score, fbeta_score, f1_score, zero_one_score, \
-    precision_recall_fscore_support, classification_report, \
-    precision_recall_curve, explained_variance_score, r2_score, \
-    zero_one, mean_square_error, hinge_loss, matthews_corrcoef, \
-    mean_squared_error, average_precision_score, auc_score
+from .metrics import (confusion_matrix, roc_curve, auc, precision_score,
+                      recall_score, fbeta_score, f1_score, zero_one_score,
+                      precision_recall_fscore_support, classification_report,
+                      precision_recall_curve, explained_variance_score,
+                      r2_score, zero_one, hinge_loss, matthews_corrcoef,
+                      mean_squared_error, average_precision_score, auc_score)

 from . import cluster
 from .cluster import adjusted_rand_score

sklearn/metrics/metrics.py

Lines changed: 0 additions & 24 deletions
@@ -1027,30 +1027,6 @@ def mean_squared_error(y_true, y_pred):
     return np.mean((y_pred - y_true) ** 2)


-@deprecated("""Incorrectly returns the cumulated error: use mean_squared_error
-instead; to be removed in v0.13""")
-def mean_square_error(y_true, y_pred):
-    """Cumulated square error regression loss
-
-    Positive floating point value: the best value is 0.0.
-
-    return the mean square error
-
-    Parameters
-    ----------
-    y_true : array-like
-
-    y_pred : array-like
-
-    Returns
-    -------
-    loss : float
-
-    """
-    y_true, y_pred = check_arrays(y_true, y_pred)
-    return np.linalg.norm(y_pred - y_true) ** 2
-
-
 def hinge_loss(y_true, pred_decision, pos_label=1, neg_label=-1):
     """
     Cumulated hinge loss (non-regularized).
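To see why the removed function was misleading, a short sketch (not part of the commit): ``mean_square_error`` returned the cumulated squared error, ``||y_pred - y_true||^2``, while ``mean_squared_error`` divides by the number of samples:

    import numpy as np
    from sklearn.metrics import mean_squared_error

    y_true = np.array([1.0, 2.0, 3.0])
    y_pred = np.array([1.0, 2.0, 4.0])
    print(mean_squared_error(y_true, y_pred))    # 0.333..., the mean
    print(np.linalg.norm(y_pred - y_true) ** 2)  # 1.0, the old cumulated value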

sklearn/mixture/gmm.py

Lines changed: 1 addition & 54 deletions
@@ -308,31 +308,6 @@ def eval(self, X):
         responsibilities = np.exp(lpr - logprob[:, np.newaxis])
         return logprob, responsibilities

-    @deprecated("""will be removed in v0.13;
-        use the score or predict method instead, depending on the question""")
-    def decode(self, X):
-        """Find most likely mixture components for each point in X.
-
-        DEPRECATED IN VERSION 0.11; WILL BE REMOVED IN VERSION 0.13.
-        use the score or predict method instead, depending on the question.
-
-        Parameters
-        ----------
-        X : array_like, shape (n, n_features)
-            List of n_features-dimensional data points. Each row
-            corresponds to a single data point.
-
-        Returns
-        -------
-        logprobs : array_like, shape (n_samples,)
-            Log probability of each point in `obs` under the model.
-
-        components : array_like, shape (n_samples,)
-            Index of the most likelihod mixture components for each observation
-        """
-        logprob, posteriors = self.eval(X)
-        return logprob, posteriors.argmax(axis=1)
-
     def score(self, X):
         """Compute the log probability under the model.

@@ -381,16 +356,6 @@ def predict_proba(self, X):
         logprob, responsibilities = self.eval(X)
         return responsibilities

-    @deprecated("""will be removed in v0.13;
-        use the score or predict method instead, depending on the question""")
-    def rvs(self, n_samples=1, random_state=None):
-        """Generate random samples from the model.
-
-        DEPRECATED IN VERSION 0.11; WILL BE REMOVED IN VERSION 0.12
-        use sample instead
-        """
-        return self.sample(n_samples, random_state)
-
     def sample(self, n_samples=1, random_state=None):
         """Generate random samples from the model.

@@ -431,7 +396,7 @@ def sample(self, n_samples=1, random_state=None):
                 num_comp_in_X, random_state=random_state).T
         return X

-    def fit(self, X, **kwargs):
+    def fit(self, X):
         """Estimate model parameters with the expectation-maximization
         algorithm.

@@ -455,24 +420,6 @@ def fit(self, X, **kwargs):
             raise ValueError(
                 'GMM estimation with %s components, but got only %s samples' %
                 (self.n_components, X.shape[0]))
-        if kwargs:
-            warnings.warn("Setting parameters in the 'fit' method is"
-                          "deprecated and will be removed in 0.13. Set it on "
-                          "initialization instead.", DeprecationWarning,
-                          stacklevel=2)
-            # initialisations for in case the user still adds parameters to fit
-            # so things don't break
-            if 'n_iter' in kwargs:
-                self.n_iter = kwargs['n_iter']
-            if 'n_init' in kwargs:
-                if kwargs['n_init'] < 1:
-                    raise ValueError('GMM estimation requires n_init > 0.')
-                else:
-                    self.n_init = kwargs['n_init']
-            if 'params' in kwargs:
-                self.params = kwargs['params']
-            if 'init_params' in kwargs:
-                self.init_params = kwargs['init_params']

         max_log_prob = -np.infty
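A sketch of the replacements for the removed GMM methods (not part of the commit; data illustrative, and ``GMM`` is the class name as of this commit):

    import numpy as np
    from sklearn.mixture import GMM

    X = np.random.RandomState(0).randn(100, 2)
    g = GMM(n_components=2).fit(X)
    logprob = g.score(X)   # per-sample log probabilities (first return of decode)
    labels = g.predict(X)  # most likely component per sample (second return of decode)
    draws = g.sample(5)    # random samples (was rvs)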

sklearn/svm/base.py

Lines changed: 2 additions & 16 deletions
@@ -113,7 +113,7 @@ def _pairwise(self):
         kernel = self.kernel
         return kernel == "precomputed" or hasattr(kernel, "__call__")

-    def fit(self, X, y, class_weight=None, sample_weight=None):
+    def fit(self, X, y, sample_weight=None):
         """Fit the SVM model according to the given training data.

         Parameters
@@ -160,13 +160,6 @@ def fit(self, X, y, class_weight=None, sample_weight=None):
             raise ValueError("The number of classes has to be greater than"
                              " one.")

-        if class_weight != None:
-            warnings.warn("'class_weight' is now an initialization parameter."
-                          "Using it in the 'fit' method is deprecated and "
-                          "will be removed in 0.13.", DeprecationWarning,
-                          stacklevel=2)
-            self.class_weight = class_weight
-
         sample_weight = np.asarray([] if sample_weight is None
                                    else sample_weight, dtype=np.float64)
         solver_type = LIBSVM_IMPL.index(self.impl)
@@ -624,7 +617,7 @@ def _get_solver_type(self):
                                  + error_string)
         return self._solver_type_dict[solver_type]

-    def fit(self, X, y, class_weight=None):
+    def fit(self, X, y):
         """Fit the model according to the given training data.

         Parameters
@@ -651,13 +644,6 @@ def fit(self, X, y, class_weight=None):
             raise ValueError("The number of classes has to be greater than"
                              " one.")

-        if class_weight != None:
-            warnings.warn("'class_weight' is now an initialization parameter."
-                          "Using it in the 'fit' method is deprecated and "
-                          "will be removed in 0.13.", DeprecationWarning,
-                          stacklevel=2)
-            self.class_weight = class_weight
-
         X = atleast2d_or_csr(X, dtype=np.float64, order="C")
         y = np.asarray(y, dtype=np.float64).ravel()
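A hedged aside (not part of the commit; data illustrative): per-call reweighting remains possible through ``sample_weight``, which ``fit`` retains on the libsvm-based estimators:

    import numpy as np
    from sklearn.svm import SVC

    X = np.array([[0.0], [1.0], [2.0], [3.0]])
    y = np.array([0, 0, 1, 1])
    w = np.where(y == 1, 2.0, 1.0)  # weight class-1 samples twice as much
    SVC(kernel='linear').fit(X, y, sample_weight=w)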

0 commit comments