MAINT Remove tolerance parameter from Perceptron (#14558) · thomasjpfan/scikit-learn@228109c · GitHub
Commit 228109c

Harsh2098 authored and thomasjpfan committed
MAINT Remove tolerance parameter from Perceptron (scikit-learn#14558)
1 parent e3fe559 commit 228109c
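
The tol=1e-3 arguments removed throughout this commit simply restate the current default: since scikit-learn 0.21, Perceptron is constructed with tol=1e-3 (and max_iter=1000) unless told otherwise, so dropping the explicit argument leaves every estimator configured exactly as before. A minimal sketch of that equivalence, assuming scikit-learn >= 0.21:

# Minimal sketch: Perceptron() and Perceptron(tol=1e-3) are configured
# identically under scikit-learn >= 0.21, where tol already defaults to 1e-3.
from sklearn.linear_model import Perceptron

clf_default = Perceptron()
clf_explicit = Perceptron(tol=1e-3)

# get_params() returns the full constructor configuration; equal dicts mean
# the removals below are pure cleanups with no behavioural change.
assert clf_default.get_params() == clf_explicit.get_params()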

File tree: 7 files changed (+17 lines, -18 lines)

benchmarks/bench_covertype.py
Lines changed: 1 addition & 1 deletion

@@ -101,7 +101,7 @@ def load_data(dtype=np.float32, order='C', random_state=13):
     'ExtraTrees': ExtraTreesClassifier(n_estimators=20),
     'RandomForest': RandomForestClassifier(n_estimators=20),
     'CART': DecisionTreeClassifier(min_samples_split=5),
-    'SGD': SGDClassifier(alpha=0.001, max_iter=1000),
+    'SGD': SGDClassifier(alpha=0.001),
     'GaussianNB': GaussianNB(),
     'liblinear': LinearSVC(loss="l2", penalty="l2", C=1000, dual=False,
                            tol=1e-3),
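
The max_iter=1000 dropped from SGDClassifier in this hunk follows the same logic: since scikit-learn 0.21, SGDClassifier already defaults to max_iter=1000, so the benchmark keeps its behaviour. A quick sanity check, assuming scikit-learn >= 0.21:

# Quick check that the benchmark's SGDClassifier keeps max_iter=1000 by default.
from sklearn.linear_model import SGDClassifier

assert SGDClassifier(alpha=0.001).max_iter == 1000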

doc/tutorial/text_analytics/solutions/exercise_01_language_train_model.py
Lines changed: 1 addition & 1 deletion

@@ -37,7 +37,7 @@
 # the pipeline instance should stored in a variable named clf
 clf = Pipeline([
     ('vec', vectorizer),
-    ('clf', Perceptron(tol=1e-3)),
+    ('clf', Perceptron()),
 ])

 # TASK: Fit the pipeline on the training set

examples/applications/plot_out_of_core_classification.py
Lines changed: 1 addition & 1 deletion

@@ -208,7 +208,7 @@ def progress(blocknum, bs, size):
 # Here are some classifiers that support the `partial_fit` method
 partial_fit_classifiers = {
     'SGD': SGDClassifier(max_iter=5),
-    'Perceptron': Perceptron(tol=1e-3),
+    'Perceptron': Perceptron(),
     'NB Multinomial': MultinomialNB(alpha=0.01),
     'Passive-Aggressive': PassiveAggressiveClassifier(tol=1e-3),
 }

examples/linear_model/plot_sgd_comparison.py
Lines changed: 2 additions & 2 deletions

@@ -25,8 +25,8 @@

 classifiers = [
     ("SGD", SGDClassifier(max_iter=100)),
-    ("ASGD", SGDClassifier(average=True, max_iter=1000)),
-    ("Perceptron", Perceptron(tol=1e-3)),
+    ("ASGD", SGDClassifier(average=True)),
+    ("Perceptron", Perceptron()),
     ("Passive-Aggressive I", PassiveAggressiveClassifier(loss='hinge',
                                                          C=1.0, tol=1e-4)),
     ("Passive-Aggressive II", PassiveAggressiveClassifier(loss='squared_hinge',

examples/text/plot_document_classification_20newsgroups.py
Lines changed: 1 addition & 1 deletion

@@ -247,7 +247,7 @@ def benchmark(clf):
 results = []
 for clf, name in (
         (RidgeClassifier(tol=1e-2, solver="sag"), "Ridge Classifier"),
-        (Perceptron(max_iter=50, tol=1e-3), "Perceptron"),
+        (Perceptron(max_iter=50), "Perceptron"),
         (PassiveAggressiveClassifier(max_iter=50, tol=1e-3),
          "Passive-Aggressive"),
         (KNeighborsClassifier(n_neighbors=10), "kNN"),

sklearn/ensemble/tests/test_bagging.py
Lines changed: 2 additions & 2 deletions

@@ -64,7 +64,7 @@ def test_classification():

     for base_estimator in [None,
                            DummyClassifier(),
-                           Perceptron(tol=1e-3),
+                           Perceptron(),
                            DecisionTreeClassifier(),
                            KNeighborsClassifier(),
                            SVC()]:
@@ -543,7 +543,7 @@ def test_base_estimator():

     assert isinstance(ensemble.base_estimator_, DecisionTreeClassifier)

-    ensemble = BaggingClassifier(Perceptron(tol=1e-3),
+    ensemble = BaggingClassifier(Perceptron(),
                                  n_jobs=3,
                                  random_state=0).fit(X_train, y_train)


sklearn/ensemble/tests/test_base.py
Lines changed: 9 additions & 10 deletions

@@ -22,7 +22,7 @@
 def test_base():
     # Check BaseEnsemble methods.
     ensemble = BaggingClassifier(
-        base_estimator=Perceptron(tol=1e-3, random_state=None), n_estimators=3)
+        base_estimator=Perceptron(random_state=None), n_estimators=3)

     iris = load_iris()
     ensemble.fit(iris.data, iris.target)
@@ -43,15 +43,15 @@ def test_base():
     assert isinstance(ensemble[2].random_state, int)
     assert ensemble[1].random_state != ensemble[2].random_state

-    np_int_ensemble = BaggingClassifier(base_estimator=Perceptron(tol=1e-3),
+    np_int_ensemble = BaggingClassifier(base_estimator=Perceptron(),
                                         n_estimators=np.int32(3))
     np_int_ensemble.fit(iris.data, iris.target)


 def test_base_zero_n_estimators():
     # Check that instantiating a BaseEnsemble with n_estimators<=0 raises
     # a ValueError.
-    ensemble = BaggingClassifier(base_estimator=Perceptron(tol=1e-3),
+    ensemble = BaggingClassifier(base_estimator=Perceptron(),
                                  n_estimators=0)
     iris = load_iris()
     assert_raise_message(ValueError,
@@ -62,13 +62,13 @@ def test_base_zero_n_estimators():
 def test_base_not_int_n_estimators():
     # Check that instantiating a BaseEnsemble with a string as n_estimators
     # raises a ValueError demanding n_estimators to be supplied as an integer.
-    string_ensemble = BaggingClassifier(base_estimator=Perceptron(tol=1e-3),
+    string_ensemble = BaggingClassifier(base_estimator=Perceptron(),
                                         n_estimators='3')
     iris = load_iris()
     assert_raise_message(ValueError,
                          "n_estimators must be an integer",
                          string_ensemble.fit, iris.data, iris.target)
-    float_ensemble = BaggingClassifier(base_estimator=Perceptron(tol=1e-3),
+    float_ensemble = BaggingClassifier(base_estimator=Perceptron(),
                                        n_estimators=3.0)
     assert_raise_message(ValueError,
                          "n_estimators must be an integer",
@@ -79,7 +79,7 @@ def test_set_random_states():
     # Linear Discriminant Analysis doesn't have random state: smoke test
     _set_random_states(LinearDiscriminantAnalysis(), random_state=17)

-    clf1 = Perceptron(tol=1e-3, random_state=None)
+    clf1 = Perceptron(random_state=None)
     assert clf1.random_state is None
     # check random_state is None still sets
     _set_random_states(clf1, None)
@@ -88,16 +88,15 @@ def test_set_random_states():
     # check random_state fixes results in consistent initialisation
     _set_random_states(clf1, 3)
     assert isinstance(clf1.random_state, int)
-    clf2 = Perceptron(tol=1e-3, random_state=None)
+    clf2 = Perceptron(random_state=None)
     _set_random_states(clf2, 3)
     assert clf1.random_state == clf2.random_state

     # nested random_state

     def make_steps():
-        return [('sel', SelectFromModel(Perceptron(tol=1e-3,
-                                                   random_state=None))),
-                ('clf', Perceptron(tol=1e-3, random_state=None))]
+        return [('sel', SelectFromModel(Perceptron(random_state=None))),
+                ('clf', Perceptron(random_state=None))]

     est1 = Pipeline(make_steps())
     _set_random_states(est1, 3)

0 commit comments