Set self.features_ instead of self.features · sebp/scikit-survival@c21f7f5 · GitHub
[go: up one dir, main page]

Skip to content

Commit c21f7f5

Browse files
committed
Set self.features_ instead of self.features
See scikit-learn/scikit-learn#7846
1 parent 07b8f7b commit c21f7f5

File tree

1 file changed

+6
-6
lines changed

1 file changed

+6
-6
lines changed

sksurv/ensemble/boosting.py

Lines changed: 6 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -522,24 +522,24 @@ def _check_params(self):
522522

523523
if isinstance(self.max_features, str):
524524
if self.max_features == "auto":
525-
max_features = self.n_features
525+
max_features = self.n_features_
526526
elif self.max_features == "sqrt":
527-
max_features = max(1, int(numpy.sqrt(self.n_features)))
527+
max_features = max(1, int(numpy.sqrt(self.n_features_)))
528528
elif self.max_features == "log2":
529-
max_features = max(1, int(numpy.log2(self.n_features)))
529+
max_features = max(1, int(numpy.log2(self.n_features_)))
530530
else:
531531
raise ValueError("Invalid value for max_features: %r. "
532532
"Allowed string values are 'auto', 'sqrt' "
533533
"or 'log2'." % self.max_features)
534534
elif self.max_features is None:
535-
max_features = self.n_features
535+
max_features = self.n_features_
536536
elif isinstance(self.max_features, (numbers.Integral, numpy.integer)):
537537
if self.max_features < 1:
538538
raise ValueError("max_features must be in (0, n_features]")
539539
max_features = self.max_features
540540
else: # float
541541
if 0. < self.max_features <= 1.:
542-
max_features = max(int(self.max_features * self.n_features), 1)
542+
max_features = max(int(self.max_features * self.n_features_), 1)
543543
else:
544544
raise ValueError("max_features must be in (0, 1.0]")
545545

@@ -727,7 +727,7 @@ def fit(self, X, y, sample_weight=None, monitor=None):
727727
random_state = check_random_state(self.random_state)
728728

729729
X, event, time = check_arrays_survival(X, y, accept_sparse=['csr', 'csc', 'coo'], dtype=DTYPE)
730-
n_samples, self.n_features = X.shape
730+
n_samples, self.n_features_ = X.shape
731731

732732
X = X.astype(DTYPE)
733733
if sample_weight is None:

0 commit comments

Comments
 (0)
0