From caa34cf4d333fc27c770865c8896595fdec581f1 Mon Sep 17 00:00:00 2001
From: dsquareindia
Date: Fri, 8 Jan 2016 02:02:06 +0530
Subject: [PATCH] FIX #6076. Bug in QuantileLossFunction

Flipped `diff` sign for cases of `~mask`.
---
 sklearn/ensemble/gradient_boosting.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/sklearn/ensemble/gradient_boosting.py b/sklearn/ensemble/gradient_boosting.py
index 4a6f4133033e9..295c6ead6c32c 100644
--- a/sklearn/ensemble/gradient_boosting.py
+++ b/sklearn/ensemble/gradient_boosting.py
@@ -421,10 +421,10 @@ def __call__(self, y, pred, sample_weight=None):
         mask = y > pred
         if sample_weight is None:
             loss = (alpha * diff[mask].sum() +
-                    (1.0 - alpha) * diff[~mask].sum()) / y.shape[0]
+                    (1.0 - alpha) * (-diff[~mask]).sum()) / y.shape[0]
         else:
             loss = ((alpha * np.sum(sample_weight[mask] * diff[mask]) +
-                    (1.0 - alpha) * np.sum(sample_weight[~mask] * diff[~mask])) /
+                    (1.0 - alpha) * np.sum(sample_weight[~mask] * (-diff[~mask]))) /
                     sample_weight.sum())
         return loss
 