FIX multinomial logistic regression class weigths · scikit-learn/scikit-learn@7f0c91b · GitHub

Commit 7f0c91b

FIX multinomial logistic regression class weigths
1 parent 6db9ee0 commit 7f0c91b

File tree

2 files changed: 73 additions, 19 deletions


sklearn/linear_model/logistic.py

Lines changed: 14 additions & 14 deletions
@@ -593,11 +593,11 @@ def logistic_regression_path(X, y, pos_class=None, Cs=10, fit_intercept=True,
         sample_weight = np.ones(X.shape[0])
 
     # If class_weights is a dict (provided by the user), the weights
-    # are assigned to the original labels. If it is "auto", then
+    # are assigned to the original labels. If it is "balanced", then
     # the class_weights are assigned after masking the labels with a OvR.
     le = LabelEncoder()
 
-    if isinstance(class_weight, dict):
+    if isinstance(class_weight, dict) or multi_class == 'multinomial':
         if solver == "liblinear":
             if classes.size == 2:
                 # Reconstruct the weights with keys 1 and -1
@@ -609,7 +609,7 @@ def logistic_regression_path(X, y, pos_class=None, Cs=10, fit_intercept=True,
                                  "solver cannot handle multiclass with "
                                  "class_weight of type dict. Use the lbfgs, "
                                  "newton-cg or sag solvers or set "
-                                 "class_weight='auto'")
+                                 "class_weight='balanced'")
         else:
             class_weight_ = compute_class_weight(class_weight, classes, y)
             sample_weight *= class_weight_[le.fit_transform(y)]
@@ -622,20 +622,20 @@ def logistic_regression_path(X, y, pos_class=None, Cs=10, fit_intercept=True,
         mask = (y == pos_class)
         y_bin = np.ones(y.shape, dtype=np.float64)
         y_bin[~mask] = -1.
+        # for compute_class_weight
+
+        if class_weight in ("auto", "balanced"):
+            class_weight_ = compute_class_weight(class_weight, mask_classes,
+                                                 y_bin)
+            sample_weight *= class_weight_[le.fit_transform(y_bin)]
 
     else:
         lbin = LabelBinarizer()
-        Y_bin = lbin.fit_transform(y)
-        if Y_bin.shape[1] == 1:
-            Y_bin = np.hstack([1 - Y_bin, Y_bin])
-        w0 = np.zeros((Y_bin.shape[1], n_features + int(fit_intercept)),
+        Y_binarized = lbin.fit_transform(y)
+        if Y_binarized.shape[1] == 1:
+            Y_binarized = np.hstack([1 - Y_binarized, Y_binarized])
+        w0 = np.zeros((Y_binarized.shape[1], n_features + int(fit_intercept)),
                       order='F')
-        mask_classes = classes
-
-        if class_weight == "auto":
-            class_weight_ = compute_class_weight(class_weight, mask_classes,
-                                                 y_bin)
-            sample_weight *= class_weight_[le.fit_transform(y_bin)]
 
     if coef is not None:
         # it must work both giving the bias term and not
@@ -664,7 +664,7 @@ def logistic_regression_path(X, y, pos_class=None, Cs=10, fit_intercept=True,
     if multi_class == 'multinomial':
         # fmin_l_bfgs_b and newton-cg accepts only ravelled parameters.
         w0 = w0.ravel()
-        target = Y_bin
+        target = Y_binarized
        if solver == 'lbfgs':
             func = lambda x, *args: _multinomial_loss_grad(x, *args)[0:2]
         elif solver == 'newton-cg':
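
Net effect of the logistic.py hunks: when multi_class='multinomial', a user-supplied class_weight dict (or 'balanced') is now folded into sample_weight over the original labels, instead of only being handled after the OvR masking. Below is a minimal standalone sketch of that weighting step, not part of the commit; the toy label vector and the print call are illustrative, and recent scikit-learn releases expect keyword arguments for compute_class_weight.

import numpy as np
from sklearn.preprocessing import LabelEncoder
from sklearn.utils import compute_class_weight

# Imbalanced toy labels: class 0 is common, class 2 is rare.
y = np.array([0, 0, 0, 1, 1, 2])
classes = np.unique(y)

# "balanced" weights are n_samples / (n_classes * bincount(y)).
class_weight_ = compute_class_weight(class_weight="balanced",
                                     classes=classes, y=y)

# Map each sample to the weight of its class, as the fixed code path does.
le = LabelEncoder()
sample_weight = np.ones(y.shape[0])
sample_weight *= class_weight_[le.fit_transform(y)]
print(sample_weight)  # rarer classes receive the larger per-sample weights
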

sklearn/linear_model/tests/test_logistic.py

Lines changed: 59 additions & 5 deletions
@@ -11,10 +11,12 @@
 from sklearn.utils.testing import assert_raises
 from sklearn.utils.testing import assert_true
 from sklearn.utils.testing import assert_warns
+from sklearn.utils.testing import assert_warns_message
 from sklearn.utils.testing import raises
 from sklearn.utils.testing import ignore_warnings
 from sklearn.utils.testing import assert_raise_message
 from sklearn.utils import ConvergenceWarning
+from sklearn.utils import compute_class_weight
 
 from sklearn.linear_model.logistic import (
     LogisticRegression,
@@ -26,7 +28,6 @@
 from sklearn.datasets import load_iris, make_classification
 from sklearn.metrics import log_loss
 
-
 X = [[-1, 0], [0, 1], [1, 1]]
 X_sp = sp.csr_matrix(X)
 Y1 = [0, 1, 1]
@@ -542,12 +543,12 @@ def test_logistic_regressioncv_class_weights():
     X, y = make_classification(n_samples=20, n_features=20, n_informative=10,
                                n_classes=3, random_state=0)
 
-    # Test the liblinear fails when class_weight of type dict is
-    # provided, when it is multiclass. However it can handle
-    # binary problems.
+    msg = ("In LogisticRegressionCV the liblinear solver cannot handle "
+           "multiclass with class_weight of type dict. Use the lbfgs, "
+           "newton-cg or sag solvers or set class_weight='balanced'")
     clf_lib = LogisticRegressionCV(class_weight={0: 0.1, 1: 0.2},
                                    solver='liblinear')
-    assert_raises(ValueError, clf_lib.fit, X, y)
+    assert_raise_message(ValueError, msg, clf_lib.fit, X, y)
     y_ = y.copy()
     y_[y == 2] = 1
     clf_lib.fit(X, y_)
@@ -613,6 +614,59 @@ def test_logistic_regression_sample_weights():
     assert_array_almost_equal(clf_cw_12.coef_, clf_sw_12.coef_, decimal=4)
 
 
+def _compute_class_weight_dictionary(y):
+    # compute class_weight and return it as a dictionary
+    classes = np.unique(y)
+    class_weight = compute_class_weight("balanced", classes, y)
+
+    class_weight_dict = {}
+    for (cw, cl) in zip(class_weight, classes):
+        class_weight_dict[cl] = cw
+
+    return class_weight_dict
+
+
+def test_logistic_regression_class_weights():
+    # Multinomial case: remove 90% of class 0
+    X = iris.data[45:, :]
+    y = iris.target[45:]
+    solvers = ("lbfgs", "newton-cg")
+    class_weight_dict = _compute_class_weight_dictionary(y)
+
+    for solver in solvers:
+        clf1 = LogisticRegression(solver=solver, multi_class="multinomial",
+                                  class_weight="balanced")
+        clf2 = LogisticRegression(solver=solver, multi_class="multinomial",
+                                  class_weight=class_weight_dict)
+        clf1.fit(X, y)
+        clf2.fit(X, y)
+        assert_array_almost_equal(clf1.coef_, clf2.coef_, decimal=6)
+
+    # Binary case: remove 90% of class 0 and 100% of class 2
+    X = iris.data[45:100, :]
+    y = iris.target[45:100]
+    solvers = ("lbfgs", "newton-cg", "liblinear")
+    class_weight_dict = _compute_class_weight_dictionary(y)
+
+    for solver in solvers:
+        clf1 = LogisticRegression(solver=solver, multi_class="ovr",
+                                  class_weight="balanced")
+        clf2 = LogisticRegression(solver=solver, multi_class="ovr",
+                                  class_weight=class_weight_dict)
+        clf1.fit(X, y)
+        clf2.fit(X, y)
+        assert_array_almost_equal(clf1.coef_, clf2.coef_, decimal=6)
+
+
+def test_multinomial_logistic_regression_with_classweight_auto():
+    X, y = iris.data, iris.target
+    model = LogisticRegression(multi_class='multinomial',
+                               class_weight='auto', solver='lbfgs')
+    assert_warns_message(DeprecationWarning,
+                         "class_weight='auto' heuristic is deprecated",
+                         model.fit, X, y)
+
+
 def test_logistic_regression_convergence_warnings():
     # Test that warnings are raised if model does not converge
 
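For context on what the new tests exercise from a user's point of view, here is a short usage sketch, not part of the commit, assuming the same iris subset as the test; on newer scikit-learn releases the multi_class parameter may emit a deprecation warning.

from sklearn.datasets import load_iris
from sklearn.linear_model import LogisticRegression

X, y = load_iris(return_X_y=True)
X, y = X[45:], y[45:]  # keep only 10% of class 0, as in the new test

# With the fix, "balanced" (or an explicit dict) also reweights the
# multinomial/softmax objective, not just the one-vs-rest path.
clf = LogisticRegression(solver="lbfgs", multi_class="multinomial",
                         class_weight="balanced", max_iter=1000)
clf.fit(X, y)
print(clf.coef_.shape)  # (3, 4): one coefficient row per class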