8000 cosmit · bennihepp/scikit-learn@b35aa6f · GitHub
[go: up one dir, main page]

Skip to content

Commit b35aa6f

Browse files
committed
cosmit
1 parent 40f2132 commit b35aa6f

File tree

1 file changed

+15
-13
lines changed

1 file changed

+15
-13
lines changed

sklearn/ensemble/gradient_boosting.py

Lines changed: 15 additions & 13 deletions
Original file line number · Diff line number · Diff line change
@@ -97,7 +97,9 @@ class LossFunction(object):
9797
Attributes
9898
----------
9999
K : int
100-
The number of classes; 1 for regression.
100+
The number of regression trees to be induced;
101+
1 for regression and binary classification;
102+
``n_classes`` for multi-class classification.
101103
"""
102104
__metaclass__ = ABCMeta
103105

@@ -518,11 +520,11 @@ def staged_decision_function(self, X):
518520
raise ValueError("X.shape[1] should be %d, not %d." %
519521
(self.n_features, X.shape[1]))
520522

521-
f = self.init.predict(X).astype(np.float64)
523+
score = self.init.predict(X).astype(np.float64)
522524

523525
for i in range(self.n_estimators):
524-
predict_stage(self.estimators_, i, X, self.learn_rate, f)
525-
yield f
526+
predict_stage(self.estimators_, i, X, self.learn_rate, score)
527+
yield score
526528

527529

528530
class GradientBoostingClassifier(BaseGradientBoosting, ClassifierMixin):
@@ -643,8 +645,8 @@ def predict(self, X):
643645
y : array of shape = [n_samples]
644646
The predicted classes.
645647
"""
646-
P = self.predict_proba(X)
647-
return self.classes_.take(np.argmax(P, axis=1), axis=0)
648+
probas = self.predict_proba(X)
649+
return self.classes_.take(np.argmax(probas, axis=1), axis=0)
648650

649651
def predict_proba(self, X):
650652
"""Predict class probabilities for X.
@@ -669,17 +671,17 @@ def predict_proba(self, X):
669671
raise ValueError("X.shape[1] should be %d, not %d." %
670672
(self.n_features, X.shape[1]))
671673

672-
P = np.ones((X.shape[0], self.n_classes_), dtype=np.float64)
674+
proba = np.ones((X.shape[0], self.n_classes_), dtype=np.float64)
673675

674-
f = self.init.predict(X).astype(np.float64)
675-
predict_stages(self.estimators_, X, self.learn_rate, f)
676+
score = self.init.predict(X).astype(np.float64)
677+
predict_stages(self.estimators_, X, self.learn_rate, score)
676678

677679
if not self.loss_.is_multi_class:
678-
P[:, 1] = 1.0 / (1.0 + np.exp(-f.ravel()))
679-
P[:, 0] -= P[:, 1]
680+
proba[:, 1] = 1.0 / (1.0 + np.exp(-score.ravel()))
681+
proba[:, 0] -= proba[:, 1]
680682
else:
681-
P = np.exp(f) / np.sum(np.exp(f), axis=1)[:, np.newaxis]
682-
return P
683+
proba = np.exp(score) / np.sum(np.exp(score), axis=1)[:, np.newaxis]
684+
return proba
683685

684686

685687
class GradientBoostingRegressor(BaseGradientBoosting, RegressorMixin):

0 commit comments

Comments (0)