FIX class_weight in LogisticRegression and LogisticRegressionCV · scikit-learn/scikit-learn@3a649ce · GitHub

Commit 3a649ce

FIX class_weight in LogisticRegression and LogisticRegressionCV
1 parent d4e9d79 commit 3a649ce

File tree

3 files changed: +80 −27 lines

  sklearn/linear_model/logistic.py
  sklearn/linear_model/tests/test_logistic.py
  sklearn/tests/test_common.py
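Note: the changes below revolve around scikit-learn's class_weight="balanced" heuristic, which weights each class inversely to its frequency as n_samples / (n_classes * np.bincount(y)). A minimal sketch of the heuristic, assuming the 0.17-era API where compute_class_weight is importable from sklearn.utils and takes positional arguments (the same call used in the tests added by this commit); the sample labels are illustrative only:

import numpy as np
from sklearn.utils import compute_class_weight

# Imbalanced labels: 2 samples of class 0, 8 samples of class 1.
y = np.array([0, 0, 1, 1, 1, 1, 1, 1, 1, 1])
classes = np.unique(y)

# "balanced" weights each class by n_samples / (n_classes * count):
# class 0 -> 10 / (2 * 2) = 2.5, class 1 -> 10 / (2 * 8) = 0.625
weights = compute_class_weight("balanced", classes, y)
print(dict(zip(classes, weights)))  # {0: 2.5, 1: 0.625}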

sklearn/linear_model/logistic.py

Lines changed: 25 additions & 16 deletions
@@ -594,11 +594,11 @@ def logistic_regression_path(X, y, pos_class=None, Cs=10, fit_intercept=True,
         sample_weight = np.ones(X.shape[0])
 
     # If class_weights is a dict (provided by the user), the weights
-    # are assigned to the original labels. If it is "auto", then
+    # are assigned to the original labels. If it is "balanced", then
     # the class_weights are assigned after masking the labels with a OvR.
     le = LabelEncoder()
 
-    if isinstance(class_weight, dict):
+    if isinstance(class_weight, dict) or multi_class == 'multinomial':
         if solver == "liblinear":
             if classes.size == 2:
                 # Reconstruct the weights with keys 1 and -1
@@ -610,7 +610,7 @@ def logistic_regression_path(X, y, pos_class=None, Cs=10, fit_intercept=True,
                                  "solver cannot handle multiclass with "
                                  "class_weight of type dict. Use the lbfgs, "
                                  "newton-cg or sag solvers or set "
-                                 "class_weight='auto'")
+                                 "class_weight='balanced'")
         else:
             class_weight_ = compute_class_weight(class_weight, classes, y)
             sample_weight *= class_weight_[le.fit_transform(y)]
@@ -623,20 +623,20 @@ def logistic_regression_path(X, y, pos_class=None, Cs=10, fit_intercept=True,
         mask = (y == pos_class)
         y_bin = np.ones(y.shape, dtype=np.float64)
         y_bin[~mask] = -1.
+        # for compute_class_weight
+
+        if class_weight in ("auto", "balanced"):
+            class_weight_ = compute_class_weight(class_weight, mask_classes,
+                                                 y_bin)
+            sample_weight *= class_weight_[le.fit_transform(y_bin)]
 
     else:
         lbin = LabelBinarizer()
-        Y_bin = lbin.fit_transform(y)
-        if Y_bin.shape[1] == 1:
-            Y_bin = np.hstack([1 - Y_bin, Y_bin])
-        w0 = np.zeros((Y_bin.shape[1], n_features + int(fit_intercept)),
+        Y_binarized = lbin.fit_transform(y)
+        if Y_binarized.shape[1] == 1:
+            Y_binarized = np.hstack([1 - Y_binarized, Y_binarized])
+        w0 = np.zeros((Y_binarized.shape[1], n_features + int(fit_intercept)),
                       order='F')
-        mask_classes = classes
-
-        if class_weight == "auto":
-            class_weight_ = compute_class_weight(class_weight, mask_classes,
-                                                 y_bin)
-            sample_weight *= class_weight_[le.fit_transform(y_bin)]
 
     if coef is not None:
         # it must work both giving the bias term and not
@@ -665,7 +665,7 @@ def logistic_regression_path(X, y, pos_class=None, Cs=10, fit_intercept=True,
     if multi_class == 'multinomial':
         # fmin_l_bfgs_b and newton-cg accepts only ravelled parameters.
         w0 = w0.ravel()
-        target = Y_bin
+        target = Y_binarized
         if solver == 'lbfgs':
             func = lambda x, *args: _multinomial_loss_grad(x, *args)[0:2]
         elif solver == 'newton-cg':
@@ -1538,6 +1538,15 @@ def fit(self, X, y, sample_weight=None):
             raise ValueError("class_weight provided should be a "
                              "dict or 'balanced'")
 
+        # compute the class weights for the entire dataset y
+        if self.class_weight in ("auto", "balanced"):
+            classes = np.unique(y)
+            class_weight = compute_class_weight(self.class_weight, classes, y)
+            class_weight = dict(zip(classes, class_weight))
+        else:
+            class_weight = self.class_weight
+
         path_func = delayed(_log_reg_scoring_path)
 
         # The SAG solver releases the GIL so it's more efficient to use
@@ -1549,7 +1558,7 @@ def fit(self, X, y, sample_weight=None):
                       fit_intercept=self.fit_intercept, penalty=self.penalty,
                       dual=self.dual, solver=self.solver, tol=self.tol,
                       max_iter=self.max_iter, verbose=self.verbose,
-                      class_weight=self.class_weight, scoring=self.scoring,
+                      class_weight=class_weight, scoring=self.scoring,
                       multi_class=self.multi_class,
                       intercept_scaling=self.intercept_scaling,
                       random_state=self.random_state,
@@ -1621,7 +1630,7 @@ def fit(self, X, y, sample_weight=None):
                     fit_intercept=self.fit_intercept, coef=coef_init,
                     max_iter=self.max_iter, tol=self.tol,
                     penalty=self.penalty, copy=False,
-                    class_weight=self.class_weight,
+                    class_weight=class_weight,
                     multi_class=self.multi_class,
                     verbose=max(0, self.verbose - 1),
                     random_state=self.random_state,
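The key behavioral change in LogisticRegressionCV.fit is that "auto"/"balanced" is now resolved into an explicit per-class weight dict computed on the full training y before the per-fold paths are fitted, so every CV fold uses the same weights rather than weights derived from its own subsample. A rough paraphrase of that added block as a standalone sketch; resolve_class_weight is a hypothetical helper name introduced here for illustration, not part of the library:

import numpy as np
from sklearn.utils import compute_class_weight

def resolve_class_weight(class_weight, y):
    # Paraphrase of the block added to LogisticRegressionCV.fit above:
    # turn the "auto"/"balanced" string into an explicit dict computed
    # on the *entire* dataset, so each CV fold sees identical weights.
    if class_weight in ("auto", "balanced"):
        classes = np.unique(y)
        weights = compute_class_weight(class_weight, classes, y)
        return dict(zip(classes, weights))
    return class_weight

y = np.array([0] * 3 + [1] * 9)
print(resolve_class_weight("balanced", y))        # {0: 2.0, 1: 0.666...}
print(resolve_class_weight({0: 1.0, 1: 2.0}, y))  # user dict passed through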

sklearn/linear_model/tests/test_logistic.py

Lines changed: 55 additions & 5 deletions
@@ -11,10 +11,12 @@
 from sklearn.utils.testing import assert_raises
 from sklearn.utils.testing import assert_true
 from sklearn.utils.testing import assert_warns
+from sklearn.utils.testing import assert_warns_message
 from sklearn.utils.testing import raises
 from sklearn.utils.testing import ignore_warnings
 from sklearn.utils.testing import assert_raise_message
 from sklearn.exceptions import ConvergenceWarning
+from sklearn.utils import compute_class_weight
 
 from sklearn.linear_model.logistic import (
     LogisticRegression,
@@ -26,7 +28,6 @@
 from sklearn.datasets import load_iris, make_classification
 from sklearn.metrics import log_loss
 
-
 X = [[-1, 0], [0, 1], [1, 1]]
 X_sp = sp.csr_matrix(X)
 Y1 = [0, 1, 1]
@@ -542,12 +543,12 @@ def test_logistic_regressioncv_class_weights():
     X, y = make_classification(n_samples=20, n_features=20, n_informative=10,
                                n_classes=3, random_state=0)
 
-    # Test the liblinear fails when class_weight of type dict is
-    # provided, when it is multiclass. However it can handle
-    # binary problems.
+    msg = ("In LogisticRegressionCV the liblinear solver cannot handle "
+           "multiclass with class_weight of type dict. Use the lbfgs, "
+           "newton-cg or sag solvers or set class_weight='balanced'")
     clf_lib = LogisticRegressionCV(class_weight={0: 0.1, 1: 0.2},
                                    solver='liblinear')
-    assert_raises(ValueError, clf_lib.fit, X, y)
+    assert_raise_message(ValueError, msg, clf_lib.fit, X, y)
     y_ = y.copy()
     y_[y == 2] = 1
     clf_lib.fit(X, y_)
@@ -613,6 +614,55 @@ def test_logistic_regression_sample_weights():
     assert_array_almost_equal(clf_cw_12.coef_, clf_sw_12.coef_, decimal=4)
 
 
+def _compute_class_weight_dictionary(y):
+    # helper for returning a dictionary instead of an array
+    classes = np.unique(y)
+    class_weight = compute_class_weight("balanced", classes, y)
+    class_weight_dict = dict(zip(classes, class_weight))
+    return class_weight_dict
+
+
+def test_logistic_regression_class_weights():
+    # Multinomial case: remove 90% of class 0
+    X = iris.data[45:, :]
+    y = iris.target[45:]
+    solvers = ("lbfgs", "newton-cg")
+    class_weight_dict = _compute_class_weight_dictionary(y)
+
+    for solver in solvers:
+        clf1 = LogisticRegression(solver=solver, multi_class="multinomial",
+                                  class_weight="balanced")
+        clf2 = LogisticRegression(solver=solver, multi_class="multinomial",
+                                  class_weight=class_weight_dict)
+        clf1.fit(X, y)
+        clf2.fit(X, y)
+        assert_array_almost_equal(clf1.coef_, clf2.coef_, decimal=6)
+
+    # Binary case: remove 90% of class 0 and 100% of class 2
+    X = iris.data[45:100, :]
+    y = iris.target[45:100]
+    solvers = ("lbfgs", "newton-cg", "liblinear")
+    class_weight_dict = _compute_class_weight_dictionary(y)
+
+    for solver in solvers:
+        clf1 = LogisticRegression(solver=solver, multi_class="ovr",
+                                  class_weight="balanced")
+        clf2 = LogisticRegression(solver=solver, multi_class="ovr",
+                                  class_weight=class_weight_dict)
+        clf1.fit(X, y)
+        clf2.fit(X, y)
+        assert_array_almost_equal(clf1.coef_, clf2.coef_, decimal=6)
+
+
+def test_multinomial_logistic_regression_with_classweight_auto():
+    X, y = iris.data, iris.target
+    model = LogisticRegression(multi_class='multinomial',
+                               class_weight='auto', solver='lbfgs')
+    assert_warns_message(DeprecationWarning,
+                         "class_weight='auto' heuristic is deprecated",
+                         model.fit, X, y)
+
+
 def test_logistic_regression_convergence_warnings():
     # Test that warnings are raised if model does not converge
 
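The last new test asserts that class_weight='auto' still fits in the multinomial case but emits the deprecation warning for the 'auto' heuristic. A rough equivalent outside the test suite, using only the standard warnings module instead of the project's assert_warns_message helper; on releases where 'auto' has been removed entirely this would raise instead of warn:

import warnings
from sklearn.datasets import load_iris
from sklearn.linear_model import LogisticRegression

iris = load_iris()
X, y = iris.data, iris.target

model = LogisticRegression(multi_class='multinomial',
                           class_weight='auto', solver='lbfgs')
with warnings.catch_warnings(record=True) as caught:
    warnings.simplefilter("always")
    model.fit(X, y)

# On 0.17-era scikit-learn this list should contain the deprecation
# notice for the 'auto' heuristic.
print([str(w.message) for w in caught
       if issubclass(w.category, DeprecationWarning)])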

sklearn/tests/test_common.py

Lines changed: 0 additions & 6 deletions
@@ -113,12 +113,6 @@ def test_class_weight_balanced_linear_classifiers():
         and issubclass(clazz, LinearClassifierMixin)]
 
     for name, Classifier in linear_classifiers:
-        if name == "LogisticRegressionCV":
-            # Contrary to RidgeClassifierCV, LogisticRegressionCV use actual
-            # CV folds and fit a model for each CV iteration before averaging
-            # the coef. Therefore it is expected to not behave exactly as the
-            # other linear model.
-            continue
         yield check_class_weight_balanced_linear_classifier, name, Classifier
 
 
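This change drops the special case that skipped LogisticRegressionCV from the common class_weight='balanced' check, since the CV estimator now resolves weights over the full dataset. A sketch of invoking the re-enabled check directly, assuming the check lives in sklearn.utils.estimator_checks as it does in this era of the codebase (the import path is an assumption, not stated in the diff):

from sklearn.linear_model import LogisticRegressionCV
from sklearn.utils.estimator_checks import (
    check_class_weight_balanced_linear_classifier)

# With this commit, LogisticRegressionCV is no longer skipped and is
# expected to pass the shared balanced-class-weight check.
check_class_weight_balanced_linear_classifier("LogisticRegressionCV",
                                               LogisticRegressionCV)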
