[MRG+1] Modify gaussian_process examples for matplotlib v2 comp (#8394) · scikit-learn/scikit-learn@d9b525a

Commit d9b525a

rishikksh20 authored and amueller committed
[MRG+1] Modify gaussian_process examples for matplotlib v2 comp (#8394)
* Modify plot_gpc_xor.py for matplotlib v2 comp: add edgecolors option in scatter plot. Issue: #8364
* Modify plot_gpr_noisy.py for matplotlib v2 comp: add edgecolors option in scatter plot. Issue: #8364
* Modify plot_gpr_prior_posterior for matplotlib v2: add edgecolors attribute in scatter plot. Issue: #8364
* Modify plot_gpc.py for matplotlib v2 comp: add edgecolors attribute in scatter plot. Issue: #8364
* Modify plot_gpr_noisy.py to remove flake8 error: modify file to pass Travis build.
* Cosmetic change in plot_gpr_prior_posterior: reduce `alpha` for better plot. Issue: #8364
* Modify plot_gpr_noisy.py for better visualization: modify colorbar in contour plot. Issue: #8364
1 parent 60deaea commit d9b525a
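
For context, matplotlib 2.0 changed the default scatter-marker styling so that points are no longer drawn with a dark edge, which is presumably why every scatter call touched here now passes edgecolors explicitly. A minimal sketch of the pattern, using made-up data rather than anything from the examples:

import matplotlib.pyplot as plt
import numpy as np

rng = np.random.RandomState(0)
X = rng.rand(20, 2)  # toy points, for illustration only

# Under matplotlib < 2.0 this marker had a dark edge by default:
#     plt.scatter(X[:, 0], X[:, 1], c='r', s=50)
# Requesting the black edge explicitly keeps the same look under 2.x:
plt.scatter(X[:, 0], X[:, 1], c='r', s=50, edgecolors=(0, 0, 0))
plt.show()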

File tree

4 files changed: 13 additions & 10 deletions


examples/gaussian_process/plot_gpc.py

Lines changed: 4 additions & 2 deletions
@@ -64,8 +64,10 @@
 
 # Plot posteriors
 plt.figure(0)
-plt.scatter(X[:train_size, 0], y[:train_size], c='k', label="Train data")
-plt.scatter(X[train_size:, 0], y[train_size:], c='g', label="Test data")
+plt.scatter(X[:train_size, 0], y[:train_size], c='k', label="Train data",
+            edgecolors=(0, 0, 0))
+plt.scatter(X[train_size:, 0], y[train_size:], c='g', label="Test data",
+            edgecolors=(0, 0, 0))
 X_ = np.linspace(0, 5, 100)
 plt.plot(X_, gp_fix.predict_proba(X_[:, np.newaxis])[:, 1], 'r',
          label="Initial kernel: %s" % gp_fix.kernel_)

examples/gaussian_process/plot_gpc_xor.py

Lines changed: 2 additions & 1 deletion
@@ -44,7 +44,8 @@
                    aspect='auto', origin='lower', cmap=plt.cm.PuOr_r)
 contours = plt.contour(xx, yy, Z, levels=[0], linewidths=2,
                        linetypes='--')
-plt.scatter(X[:, 0], X[:, 1], s=30, c=Y, cmap=plt.cm.Paired)
+plt.scatter(X[:, 0], X[:, 1], s=30, c=Y, cmap=plt.cm.Paired,
+            edgecolors=(0, 0, 0))
 plt.xticks(())
 plt.yticks(())
 plt.axis([-3, 3, -3, 3])

examples/gaussian_process/plot_gpr_noisy.py

Lines changed: 4 additions & 4 deletions
@@ -47,7 +47,7 @@
                  y_mean + np.sqrt(np.diag(y_cov)),
                  alpha=0.5, color='k')
 plt.plot(X_, 0.5*np.sin(3*X_), 'r', lw=3, zorder=9)
-plt.scatter(X[:, 0], y, c='r', s=50, zorder=10)
+plt.scatter(X[:, 0], y, c='r', s=50, zorder=10, edgecolors=(0, 0, 0))
 plt.title("Initial: %s\nOptimum: %s\nLog-Marginal-Likelihood: %s"
           % (kernel, gp.kernel_,
              gp.log_marginal_likelihood(gp.kernel_.theta)))
@@ -66,7 +66,7 @@
                  y_mean + np.sqrt(np.diag(y_cov)),
                  alpha=0.5, color='k')
 plt.plot(X_, 0.5*np.sin(3*X_), 'r', lw=3, zorder=9)
-plt.scatter(X[:, 0], y, c='r', s=50, zorder=10)
+plt.scatter(X[:, 0], y, c='r', s=50, zorder=10, edgecolors=(0, 0, 0))
 plt.title("Initial: %s\nOptimum: %s\nLog-Marginal-Likelihood: %s"
           % (kernel, gp.kernel_,
              gp.log_marginal_likelihood(gp.kernel_.theta)))
@@ -83,9 +83,9 @@
 
 vmin, vmax = (-LML).min(), (-LML).max()
 vmax = 50
+level = np.around(np.logspace(np.log10(vmin), np.log10(vmax), 50), decimals=1)
 plt.contour(Theta0, Theta1, -LML,
-            levels=np.logspace(np.log10(vmin), np.log10(vmax), 50),
-            norm=LogNorm(vmin=vmin, vmax=vmax))
+            levels=level, norm=LogNorm(vmin=vmin, vmax=vmax))
 plt.colorbar()
 plt.xscale("log")
 plt.yscale("log")
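
Besides the edgecolors change, plot_gpr_noisy.py now rounds the log-spaced contour levels before handing them to plt.contour, presumably so the colorbar labels stay short and readable. A small standalone sketch of that step, with placeholder bounds instead of the ones derived from -LML in the example:

import numpy as np

vmin, vmax = 0.3, 50.0  # placeholder bounds; the example computes them from -LML
# 50 log-spaced contour levels, rounded to one decimal place
level = np.around(np.logspace(np.log10(vmin), np.log10(vmax), 50), decimals=1)
print(level[:5], level[-5:])  # inspect the first and last few rounded levels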

examples/gaussian_process/plot_gpr_prior_posterior.py

Lines changed: 3 additions & 3 deletions
@@ -44,7 +44,7 @@
     y_mean, y_std = gp.predict(X_[:, np.newaxis], return_std=True)
     plt.plot(X_, y_mean, 'k', lw=3, zorder=9)
     plt.fill_between(X_, y_mean - y_std, y_mean + y_std,
-                     alpha=0.5, color='k')
+                     alpha=0.2, color='k')
     y_samples = gp.sample_y(X_[:, np.newaxis], 10)
     plt.plot(X_, y_samples, lw=1)
     plt.xlim(0, 5)
@@ -63,11 +63,11 @@
     y_mean, y_std = gp.predict(X_[:, np.newaxis], return_std=True)
     plt.plot(X_, y_mean, 'k', lw=3, zorder=9)
     plt.fill_between(X_, y_mean - y_std, y_mean + y_std,
-                     alpha=0.5, color='k')
+                     alpha=0.2, color='k')
 
     y_samples = gp.sample_y(X_[:, np.newaxis], 10)
     plt.plot(X_, y_samples, lw=1)
-    plt.scatter(X[:, 0], y, c='r', s=50, zorder=10)
+    plt.scatter(X[:, 0], y, c='r', s=50, zorder=10, edgecolors=(0, 0, 0))
     plt.xlim(0, 5)
     plt.ylim(-3, 3)
     plt.title("Posterior (kernel: %s)\n Log-Likelihood: %.3f"

0 commit comments