Revert "ENH Prefer threads for IsolationForest (#12543)" · xhluca/scikit-learn@c67fcd4 · GitHub
[go: up one dir, main page]

Skip to content

Navigation Menu

Sign in
Appearance settings

Search code, repositories, users, issues, pull requests...

Provide feedback

We read every piece of feedback, and take your input very seriously.

Saved searches

Use saved searches to filter your results more quickly

Appearance settings

Commit c67fcd4

Browse files
author
Xing
authored
Revert "ENH Prefer threads for IsolationForest (scikit-learn#12543)"
This reverts commit 664a23c.
1 parent 4dbf7e4 commit c67fcd4

File tree

3 files changed

+2
-24
lines changed

3 files changed

+2
-24
lines changed

doc/whats_new/v0.21.rst

Lines changed: 0 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -55,15 +55,6 @@ Support for Python 3.4 and below has been officially dropped.
5555
:class:`linear_model.MultiTaskLasso` which were breaking when
5656
``warm_start = True``. :issue:`12360` by :user:`Aakanksha Joshi <joaak>`.
5757

58-
:mod:`sklearn.ensemble`
59-
.......................
60-
61-
- |Efficiency| Make :class:`ensemble.IsolationForest` prefer threads over
62-
processes when running with ``n_jobs > 1`` as the underlying decision tree
63-
fit calls do release the GIL. This changes reduces memory usage and
64-
communication overhead. :issue:`12543` by :user:`Isaac Storch <istorch>`
65-
and `Olivier Grisel`_.
66-
6758
:mod:`sklearn.metrics`
6859
......................
6960

sklearn/ensemble/bagging.py

Lines changed: 2 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -243,9 +243,6 @@ def fit(self, X, y, sample_weight=None):
243243
"""
244244
return self._fit(X, y, self.max_samples, sample_weight=sample_weight)
245245

246-
def _parallel_args(self):
247-
return {}
248-
249246
def _fit(self, X, y, max_samples=None, max_depth=None, sample_weight=None):
250247
"""Build a Bagging ensemble of estimators from the training
251248
set (X, y).
@@ -368,8 +365,7 @@ def _fit(self, X, y, max_samples=None, max_depth=None, sample_weight=None):
368365
seeds = random_state.randint(MAX_INT, size=n_more_estimators)
369366
self._seeds = seeds
370367

371-
all_results = Parallel(n_jobs=n_jobs, verbose=self.verbose,
372-
**self._parallel_args())(
368+
all_results = Parallel(n_jobs=n_jobs, verbose=self.verbose)(
373369
delayed(_parallel_build_estimators)(
374370
n_estimators[i],
375371
self,
@@ -690,8 +686,7 @@ def predict_proba(self, X):
690686
n_jobs, n_estimators, starts = _partition_estimators(self.n_estimators,
691687
self.n_jobs)
692688

693-
all_proba = Parallel(n_jobs=n_jobs, verbose=self.verbose,
694-
**self._parallel_args())(
689+
all_proba = Parallel(n_jobs=n_jobs, verbose=self.verbose)(
695690
delayed(_parallel_predict_proba)(
696691
self.estimators_[starts[i]:starts[i + 1]],
697692
self.estimators_features_[starts[i]:starts[i + 1]],

sklearn/ensemble/iforest.py

Lines changed: 0 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -14,7 +14,6 @@
1414
from ..externals import six
1515
from ..tree import ExtraTreeRegressor
1616
from ..utils import check_random_state, check_array
17-
from ..utils.fixes import _joblib_parallel_args
1817
from ..utils.validation import check_is_fitted
1918
from ..base import OutlierMixin
2019

@@ -187,13 +186,6 @@ def __init__(self,
187186
def _set_oob_score(self, X, y):
188187
raise NotImplementedError("OOB score not supported by iforest")
189188

190-
def _parallel_args(self):
191-
# ExtraTreeRegressor releases the GIL, so it's more efficient to use
192-
# a thread-based backend rather than a process-based backend so as
193-
# to avoid suffering from communication overhead and extra memory
194-
# copies.
195-
return _joblib_parallel_args(prefer='threads')
196-
197189
def fit(self, X, y=None, sample_weight=None):
198190
"""Fit estimator.
199191

0 commit comments

Comments (0)