8000 Fixed warnings for DataDimensionalityWarning, decision_function and decision_function_shape. · scikit-learn/scikit-learn@a25aa88 · GitHub
[go: up one dir, main page]

Skip to content

Commit a25aa88

Browse files
committed
Fixed warnings for DataDimensionalityWarning, decision_function and decision_function_shape.
1 parent 90922ea commit a25aa88

File tree

5 files changed

+21
-21
lines changed

5 files changed

+21
-21
lines changed

sklearn/linear_model/tests/test_least_angle.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -370,7 +370,7 @@ def test_multitarget():
370370
for estimator in (linear_model.LassoLars(), linear_model.Lars()):
371371
estimator.fit(X, Y)
372372
Y_pred = estimator.predict(X)
373-
Y_dec = estimator.decision_function(X)
373+
Y_dec = ignore_warnings(estimator.decision_function)(X)
374374
assert_array_almost_equal(Y_pred, Y_dec)
375375
alphas, active, coef, path = (estimator.alphas_, estimator.active_,
376376
estimator.coef_, estimator.coef_path_)

sklearn/linear_model/tests/test_sgd.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1040,7 +1040,7 @@ def test_partial_fit(self):
10401040
clf.partial_fit(X[:third], Y[:third])
10411041
assert_equal(clf.coef_.shape, (X.shape[1], ))
10421042
assert_equal(clf.intercept_.shape, (1,))
1043-
assert_equal(clf.decision_function([[0, 0]]).shape, (1, ))
1043+
assert_equal(clf.predict([[0, 0]]).shape, (1, ))
10441044
id1 = id(clf.coef_.data)
10451045

10461046
clf.partial_fit(X[third:], Y[third:])

sklearn/neighbors/tests/test_approximate.py

Lines changed: 14 additions & 13 deletions
Original file line numberDiff line numberDiff line change
@@ -39,7 +39,7 @@ def test_neighbors_accuracy_with_n_candidates():
3939

4040
for i, n_candidates in enumerate(n_candidates_values):
4141
lshf = LSHForest(n_candidates=n_candidates)
42-
lshf.fit(X)
42+
ignore_warnings(lshf.fit)(X)
4343
for j in range(n_iter):
4444
query = X[rng.randint(0, n_samples)].reshape(1, -1)
4545

@@ -74,7 +74,7 @@ def test_neighbors_accuracy_with_n_estimators():
7474

7575
for i, t in enumerate(n_estimators):
7676
lshf = LSHForest(n_candidates=500, n_estimators=t)
77-
lshf.fit(X)
77+
ignore_warnings(lshf.fit)(X)
7878
for j in range(n_iter):
7979
query = X[rng.randint(0, n_samples)].reshape(1, -1)
8080
neighbors = lshf.kneighbors(query, n_neighbors=n_points,
@@ -111,7 +111,7 @@ def test_kneighbors():
111111
# Test unfitted estimator
112112
assert_raises(ValueError, lshf.kneighbors, X[0])
113113

114-
lshf.fit(X)
114+
ignore_warnings(lshf.fit)(X)
115115

116116
for i in range(n_iter):
117117
n_neighbors = rng.randint(0, n_samples)
@@ -162,7 +162,7 @@ def test_radius_neighbors():
162162
# Test unfitted estimator
163163
assert_raises(ValueError, lshf.radius_neighbors, X[0])
164164

165-
lshf.fit(X)
165+
ignore_warnings(lshf.fit)(X)
166166

167167
for i in range(n_iter):
168168
# Select a random point in the dataset as the query
@@ -218,6 +218,7 @@ def test_radius_neighbors():
218218
sorted_dists_approx)))
219219

220220

221+
@ignore_warnings
221222
def test_radius_neighbors_boundary_handling():
222223
X = [[0.999, 0.001], [0.5, 0.5], [0, 1.], [-1., 0.001]]
223224
n_points = len(X)
@@ -286,7 +287,7 @@ def test_distances():
286287
X = rng.rand(n_samples, n_features)
287288

288289
lshf = LSHForest()
289-
lshf.fit(X)
290+
ignore_warnings(lshf.fit)(X)
290291

291292
for i in range(n_iter):
292293
n_neighbors = rng.randint(0, n_samples)
@@ -312,7 +313,7 @@ def test_fit():
312313
X = rng.rand(n_samples, n_features)
313314

314315
lshf = LSHForest(n_estimators=n_estimators)
315-
lshf.fit(X)
316+
ignore_warnings(lshf.fit)(X)
316317

317318
# _input_array = X
318319
assert_array_equal(X, lshf._fit_X)
@@ -343,16 +344,16 @@ def test_partial_fit():
343344
lshf = LSHForest()
344345

345346
# Test unfitted estimator
346-
lshf.partial_fit(X)
347+
ignore_warnings(lshf.partial_fit)(X)
347348
assert_array_equal(X, lshf._fit_X)
348349

349-
lshf.fit(X)
350+
ignore_warnings(lshf.fit)(X)
350351

351352
# Insert wrong dimension
352353
assert_raises(ValueError, lshf.partial_fit,
353354
np.random.randn(n_samples_partial_fit, n_features - 1))
354355

355-
lshf.partial_fit(X_partial_fit)
356+
ignore_warnings(lshf.partial_fit)(X_partial_fit)
356357

357358
# size of _input_array = samples + 1 after insertion
358359
assert_equal(lshf._fit_X.shape[0],
@@ -379,7 +380,7 @@ def test_hash_functions():
379380

380381
lshf = LSHForest(n_estimators=n_estimators,
381382
random_state=rng.randint(0, np.iinfo(np.int32).max))
382-
lshf.fit(X)
383+
ignore_warnings(lshf.fit)(X)
383384

384385
hash_functions = []
385386
for i in range(n_estimators):
@@ -405,7 +406,7 @@ def test_candidates():
405406

406407
# For zero candidates
407408
lshf = LSHForest(min_hash_match=32)
408-
lshf.fit(X_train)
409+
ignore_warnings(lshf.fit)(X_train)
409410

410411
message = ("Number of candidates is not sufficient to retrieve"
411412
" %i neighbors with"
@@ -419,7 +420,7 @@ def test_candidates():
419420

420421
# For candidates less than n_neighbors
421422
lshf = LSHForest(min_hash_match=31)
422-
lshf.fit(X_train)
423+
ignore_warnings(lshf.fit)(X_train)
423424

424425
message = ("Number of candidates is not sufficient to retrieve"
425426
" %i neighbors with"
@@ -441,7 +442,7 @@ def test_graphs():
441442
for n_samples in n_samples_sizes:
442443
X = rng.rand(n_samples, n_features)
443444
lshf = LSHForest(min_hash_match=0)
444-
lshf.fit(X)
445+
ignore_warnings(lshf.fit)(X)
445446

446447
kneighbors_graph = lshf.kneighbors_graph(X)
447448
radius_neighbors_graph = lshf.radius_neighbors_graph(X)

sklearn/svm/tests/test_sparse.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -63,7 +63,7 @@ def check_svm_model_equal(dense_svm, sparse_svm, X_train, y_train, X_test):
6363
msg = "cannot use sparse input in 'OneClassSVM' trained on dense data"
6464
else:
6565
assert_array_almost_equal(dense_svm.predict_proba(X_test_dense),
66-
sparse_svm.predict_proba(X_test), 4)
66+
sparse_svm.predict_proba(X_test), 4)
6767
msg = "cannot use sparse input in 'SVC' trained on dense data"
6868
if sparse.isspmatrix(X_test):
6969
assert_raise_message(ValueError, msg, dense_svm.predict, X_test)

sklearn/svm/tests/test_svm.py

Lines changed: 4 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -354,10 +354,9 @@ def test_decision_function_shape():
354354
assert_equal(dec.shape, (len(X_train), 10))
355355

356356

357-
@ignore_warnings
358-
def test_svr_decision_function():
357+
def test_svr_predict():
359358
# Test SVR's decision_function
360-
# Sanity check, test that decision_function implemented in python
359+
# Sanity check, test that predict implemented in python
361360
# returns the same as the one in libsvm
362361

363362
X = iris.data
@@ -367,14 +366,14 @@ def test_svr_decision_function():
367366
reg = svm.SVR(kernel='linear', C=0.1).fit(X, y)
368367

369368
dec = np.dot(X, reg.coef_.T) + reg.intercept_
370-
assert_array_almost_equal(dec.ravel(), reg.decision_function(X).ravel())
369+
assert_array_almost_equal(dec.ravel(), reg.predict(X).ravel())
371370

372371
# rbf kernel
373372
reg = svm.SVR(kernel='rbf', gamma=1).fit(X, y)
374373

375374
rbfs = rbf_kernel(X, reg.support_vectors_, gamma=reg.gamma)
376375
dec = np.dot(rbfs, reg.dual_coef_.T) + reg.intercept_
377-
assert_array_almost_equal(dec.ravel(), reg.decision_function(X).ravel())
376+
assert_array_almost_equal(dec.ravel(), reg.predict(X).ravel())
378377

379378

380379
def test_weight():

0 commit comments

Comments (0)