scikit-learn/scikit-learn · Commit a73d8a6
MAINT Don't use deprecated 1d X (or deprecated matplotlib stuff) in examples.
1 parent 417ee28 commit a73d8a6

10 files changed: 18 additions, 20 deletions

examples/applications/plot_prediction_latency.py

Lines changed: 2 additions & 2 deletions
@@ -42,7 +42,7 @@ def atomic_benchmark_estimator(estimator, X_test, verbose=False):
     n_instances = X_test.shape[0]
     runtimes = np.zeros(n_instances, dtype=np.float)
     for i in range(n_instances):
-        instance = X_test[i, :]
+        instance = X_test[[i], :]
         start = time.time()
         estimator.predict(instance)
         runtimes[i] = time.time() - start
@@ -241,7 +241,7 @@ def benchmark_throughputs(configuration, duration_secs=0.1):
         start_time = time.time()
         n_predictions = 0
         while (time.time() - start_time) < duration_secs:
-            estimator_config['instance'].predict(X_test[0])
+            estimator_config['instance'].predict(X_test[[0]])
             n_predictions += 1
         throughputs[estimator_config['name']] = n_predictions / duration_secs
     return throughputs
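
Indexing with a list keeps a single sample two-dimensional: X_test[[i], :] has shape (1, n_features), while X_test[i, :] collapses to a 1d vector, which scikit-learn estimators deprecate as input. A minimal sketch of the shape difference, on a synthetic array chosen only for illustration:

    import numpy as np

    X_test = np.arange(12, dtype=float).reshape(4, 3)  # 4 samples, 3 features

    row_1d = X_test[0, :]    # shape (3,)  -- the deprecated 1d form
    row_2d = X_test[[0], :]  # shape (1, 3) -- what predict() expects
    print(row_1d.shape, row_2d.shape)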

examples/classification/plot_lda_qda.py

Lines changed: 2 additions & 4 deletions
@@ -67,8 +67,6 @@ def plot_data(lda, X, y, y_pred, fig_index):
     X0, X1 = X[y == 0], X[y == 1]
     X0_tp, X0_fp = X0[tp0], X0[~tp0]
     X1_tp, X1_fp = X1[tp1], X1[~tp1]
-    xmin, xmax = X[:, 0].min(), X[:, 0].max()
-    ymin, ymax = X[:, 1].min(), X[:, 1].max()

     # class 0: dots
     plt.plot(X0_tp[:, 0], X0_tp[:, 1], 'o', color='red')
@@ -133,8 +131,8 @@ def plot_qda_cov(qda, splot):
     plt.axis('tight')

     # Quadratic Discriminant Analysis
-    qda = QuadraticDiscriminantAnalysis()
-    y_pred = qda.fit(X, y, store_covariances=True).predict(X)
+    qda = QuadraticDiscriminantAnalysis(store_covariances=True)
+    y_pred = qda.fit(X, y).predict(X)
     splot = plot_data(qda, X, y, y_pred, fig_index=2 * i + 2)
     plot_qda_cov(qda, splot)
     plt.axis('tight')
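
The store_covariances option is now set on the estimator itself rather than passed to fit(), where it had been deprecated. A hedged sketch of the updated pattern on toy data; the parameter name and the covariances_ attribute follow the scikit-learn version targeted by this commit, and later releases renamed them store_covariance and covariance_:

    from sklearn.datasets import make_classification
    from sklearn.discriminant_analysis import QuadraticDiscriminantAnalysis

    X, y = make_classification(n_samples=200, n_features=4, n_informative=4,
                               n_redundant=0, random_state=0)

    # Option set in the constructor, not passed to fit()
    qda = QuadraticDiscriminantAnalysis(store_covariances=True)
    y_pred = qda.fit(X, y).predict(X)
    print(len(qda.covariances_))  # one covariance matrix per class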

examples/cluster/plot_feature_agglomeration_vs_univariate_selection.py

Lines changed: 1 addition & 1 deletion
@@ -85,7 +85,7 @@
 clf = GridSearchCV(clf, {'anova__percentile': [5, 10, 20]}, cv=cv)
 clf.fit(X, y)  # set the best parameters
 coef_ = clf.best_estimator_.steps[-1][1].coef_
-coef_ = clf.best_estimator_.steps[0][1].inverse_transform(coef_)
+coef_ = clf.best_estimator_.steps[0][1].inverse_transform(coef_.reshape(1, -1))
 coef_selection_ = coef_.reshape(size, size)

 ###############################################################################
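
inverse_transform now expects a 2d array of shape (n_samples, n_reduced_features), so the 1d coefficient vector is reshaped into a single row first. A small sketch of the same reshape on a standalone FeatureAgglomeration, with toy shapes chosen only for illustration:

    import numpy as np
    from sklearn.cluster import FeatureAgglomeration

    X = np.random.RandomState(0).rand(20, 16)            # 20 samples, 16 features
    agglo = FeatureAgglomeration(n_clusters=4).fit(X)

    coef = np.arange(4, dtype=float)                      # one weight per reduced feature
    back = agglo.inverse_transform(coef.reshape(1, -1))   # 2d input -> shape (1, 16)
    print(back.shape)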

examples/decomposition/plot_pca_iris.py

Lines changed: 2 additions & 2 deletions
@@ -58,9 +58,9 @@
           X[:, 0].min(), X[:, 0].min()]
 x_surf = np.array(x_surf)
 y_surf = np.array(y_surf)
-v0 = pca.transform(pca.components_[0])
+v0 = pca.transform(pca.components_[[0]])
 v0 /= v0[-1]
-v1 = pca.transform(pca.components_[1])
+v1 = pca.transform(pca.components_[[1]])
 v1 /= v1[-1]

 ax.w_xaxis.set_ticklabels([])
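
pca.components_[0] is a single 1d row of the components matrix; wrapping the index in a list, pca.components_[[0]], keeps a (1, n_features) array that transform() accepts. Sketch on random data, with shapes chosen only for illustration:

    import numpy as np
    from sklearn.decomposition import PCA

    X = np.random.RandomState(0).rand(50, 3)
    pca = PCA(n_components=3).fit(X)

    first_axis = pca.components_[[0]]   # shape (1, 3), not (3,)
    v0 = pca.transform(first_axis)      # valid 2d input for transform()
    print(v0.shape)                     # (1, 3)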

examples/decomposition/plot_sparse_coding.py

Lines changed: 2 additions & 2 deletions
@@ -75,7 +75,7 @@ def ricker_matrix(width, resolution, n_components):
     for title, algo, alpha, n_nonzero, color in estimators:
         coder = SparseCoder(dictionary=D, transform_n_nonzero_coefs=n_nonzero,
                             transform_alpha=alpha, transform_algorithm=algo)
-        x = coder.transform(y)
+        x = coder.transform(y.reshape(1, -1))
         density = len(np.flatnonzero(x))
         x = np.ravel(np.dot(x, D))
         squared_error = np.sum((y - x) ** 2)
@@ -86,7 +86,7 @@ def ricker_matrix(width, resolution, n_components):
     # Soft thresholding debiasing
     coder = SparseCoder(dictionary=D, transform_algorithm='threshold',
                         transform_alpha=20)
-    x = coder.transform(y)
+    x = coder.transform(y.reshape(1, -1))
     _, idx = np.where(x != 0)
     x[0, idx], _, _, _ = np.linalg.lstsq(D[idx, :].T, y)
     x = np.ravel(np.dot(x, D))

examples/ensemble/plot_gradient_boosting_regression.py

Lines changed: 1 addition & 1 deletion
@@ -47,7 +47,7 @@
 # compute test set deviance
 test_score = np.zeros((params['n_estimators'],), dtype=np.float64)

-for i, y_pred in enumerate(clf.staged_decision_function(X_test)):
+for i, y_pred in enumerate(clf.staged_predict(X_test)):
     test_score[i] = clf.loss_(y_test, y_pred)

 plt.figure(figsize=(12, 6))
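
For regression, staged_decision_function was deprecated in favour of staged_predict, which yields the model's prediction after each boosting stage. A hedged sketch of building a per-stage test-error curve on toy data; mean_squared_error stands in here for the example's clf.loss_ attribute:

    import numpy as np
    from sklearn.datasets import make_regression
    from sklearn.ensemble import GradientBoostingRegressor
    from sklearn.metrics import mean_squared_error
    from sklearn.model_selection import train_test_split

    X, y = make_regression(n_samples=200, n_features=5, random_state=0)
    X_train, X_test, y_train, y_test = train_test_split(X, y, random_state=0)
    clf = GradientBoostingRegressor(n_estimators=100, random_state=0)
    clf.fit(X_train, y_train)

    test_score = np.zeros(clf.n_estimators)
    for i, y_pred in enumerate(clf.staged_predict(X_test)):
        # one prediction array per boosting stage
        test_score[i] = mean_squared_error(y_test, y_pred)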

examples/linear_model/plot_sgd_separating_hyperplane.py

Lines changed: 1 addition & 1 deletion
@@ -30,7 +30,7 @@
 for (i, j), val in np.ndenumerate(X1):
     x1 = val
     x2 = X2[i, j]
-    p = clf.decision_function([x1, x2])
+    p = clf.decision_function([[x1, x2]])
     Z[i, j] = p[0]
 levels = [-1.0, 0.0, 1.0]
 linestyles = ['dashed', 'solid', 'dashed']
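
The extra pair of brackets turns the single grid point into a one-row 2d array. The per-point loop could also be replaced by one vectorised call over the whole grid; a sketch under illustrative data (the classifier and grid here are assumptions, not the example's own setup):

    import numpy as np
    from sklearn.linear_model import SGDClassifier

    rng = np.random.RandomState(0)
    X = rng.randn(100, 2)
    y = (X[:, 0] + X[:, 1] > 0).astype(int)
    clf = SGDClassifier(loss="hinge").fit(X, y)

    X1, X2 = np.meshgrid(np.linspace(-3, 3, 50), np.linspace(-3, 3, 50))
    # one call on all grid points instead of decision_function([[x1, x2]]) per point
    Z = clf.decision_function(np.c_[X1.ravel(), X2.ravel()]).reshape(X1.shape)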

examples/manifold/plot_compare_methods.py

Lines changed: 1 addition & 1 deletion
@@ -113,7 +113,7 @@
 Y = tsne.fit_transform(X)
 t1 = time()
 print("t-SNE: %.2g sec" % (t1 - t0))
-ax = fig.add_subplot(250)
+ax = fig.add_subplot(2, 5, 10)
 plt.scatter(Y[:, 0], Y[:, 1], c=color, cmap=plt.cm.Spectral)
 plt.title("t-SNE (%.2g sec)" % (t1 - t0))
 ax.xaxis.set_major_formatter(NullFormatter())
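
fig.add_subplot(250) relies on matplotlib's three-digit shorthand, which can only encode positions 1 through 9; the tenth slot of a 2x5 grid has to be spelled out as separate arguments. A minimal matplotlib sketch:

    import matplotlib.pyplot as plt

    fig = plt.figure(figsize=(12, 6))
    # 2 rows x 5 columns, last position: cannot be written with the 3-digit shorthand
    ax = fig.add_subplot(2, 5, 10)
    ax.set_title("t-SNE")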

examples/manifold/plot_manifold_sphere.py

Lines changed: 5 additions & 5 deletions
@@ -103,7 +103,7 @@
 print("%s: %.2g sec" % ('ISO', t1 - t0))

 ax = fig.add_subplot(257)
-plt.scatter(trans_data[0], trans_data[1], c=colors, cmap=plt.cm.rainbow)
+plt.scatter(trans_data[0], trans_data[1], c=colors, cmap=plt.cm.rainbow)
 plt.title("%s (%.2g sec)" % ('Isomap', t1 - t0))
 ax.xaxis.set_major_formatter(NullFormatter())
 ax.yaxis.set_major_formatter(NullFormatter())
@@ -117,7 +117,7 @@
 print("MDS: %.2g sec" % (t1 - t0))

 ax = fig.add_subplot(258)
-plt.scatter(trans_data[0], trans_data[1], c=colors, cmap=plt.cm.rainbow)
+plt.scatter(trans_data[0], trans_data[1], c=colors, cmap=plt.cm.rainbow)
 plt.title("MDS (%.2g sec)" % (t1 - t0))
 ax.xaxis.set_major_formatter(NullFormatter())
 ax.yaxis.set_major_formatter(NullFormatter())
@@ -132,7 +132,7 @@
 print("Spectral Embedding: %.2g sec" % (t1 - t0))

 ax = fig.add_subplot(259)
-plt.scatter(trans_data[0], trans_data[1], c=colors, cmap=plt.cm.rainbow)
+plt.scatter(trans_data[0], trans_data[1], c=colors, cmap=plt.cm.rainbow)
 plt.title("Spectral Embedding (%.2g sec)" % (t1 - t0))
 ax.xaxis.set_major_formatter(NullFormatter())
 ax.yaxis.set_major_formatter(NullFormatter())
@@ -145,8 +145,8 @@
 t1 = time()
 print("t-SNE: %.2g sec" % (t1 - t0))

-ax = fig.add_subplot(250)
-plt.scatter(trans_data[0], trans_data[1], c=colors, cmap=plt.cm.rainbow)
+ax = fig.add_subplot(2, 5, 10)
+plt.scatter(trans_data[0], trans_data[1], c=colors, cmap=plt.cm.rainbow)
 plt.title("t-SNE (%.2g sec)" % (t1 - t0))
 ax.xaxis.set_major_formatter(NullFormatter())
 ax.yaxis.set_major_formatter(NullFormatter())

examples/neighbors/plot_approximate_nearest_neighbors_scalability.py

Lines changed: 1 addition & 1 deletion
@@ -89,7 +89,7 @@

 for i in range(n_iter):
     # pick one query at random to study query time variability in LSHForest
-    query = queries[rng.randint(0, n_queries)]
+    query = queries[[rng.randint(0, n_queries)]]

     t0 = time.time()
     exact_neighbors = nbrs.kneighbors(query, return_distance=False)
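
As in the other examples, the double-bracket indexing keeps the randomly chosen query as a (1, n_features) array, which is the shape kneighbors() expects. A sketch with NearestNeighbors on random data (the example itself queries an LSHForest, which has since been removed from scikit-learn):

    import numpy as np
    from sklearn.neighbors import NearestNeighbors

    rng = np.random.RandomState(42)
    X = rng.rand(1000, 10)
    queries = rng.rand(50, 10)
    nbrs = NearestNeighbors(n_neighbors=5).fit(X)

    query = queries[[rng.randint(0, len(queries))]]   # shape (1, 10), not (10,)
    exact_neighbors = nbrs.kneighbors(query, return_distance=False)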
