Added colorblind compatibility · scikit-learn/scikit-learn@245c956 · GitHub
[go: up one dir, main page]

Skip to content

Commit 245c956

Browse files
committed
Added colorblind compatibility
1 parent 0ba5c24 commit 245c956

File tree

6 files changed

+49
-39
lines changed

6 files changed

+49
-39
lines changed

examples/covariance/plot_lw_vs_oas.py

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -60,9 +60,9 @@
6060
# plot MSE
6161
plt.subplot(2, 1, 1)
6262
plt.errorbar(n_samples_range, lw_mse.mean(1), yerr=lw_mse.std(1),
63-
label='Ledoit-Wolf', color='g')
63+
label='Ledoit-Wolf', color='navy', lw=2)
6464
plt.errorbar(n_samples_range, oa_mse.mean(1), yerr=oa_mse.std(1),
65-
label='OAS', color='r')
65+
label='OAS', color='darkorange', lw=2)
6666
plt.ylabel("Squared error")
6767
plt.legend(loc="upper right")
6868
plt.title("Comparison of covariance estimators")
@@ -71,9 +71,9 @@
7171
# plot shrinkage coefficient
7272
plt.subplot(2, 1, 2)
7373
plt.errorbar(n_samples_range, lw_shrinkage.mean(1), yerr=lw_shrinkage.std(1),
74-
label='Ledoit-Wolf', color='g')
74+
label='Ledoit-Wolf', color='navy', lw=2)
7575
plt.errorbar(n_samples_range, oa_shrinkage.mean(1), yerr=oa_shrinkage.std(1),
76-
label='OAS', color='r')
76+
label='OAS', color='darkorange', lw=2)
7777
plt.xlabel("n_samples")
7878
plt.ylabel("Shrinkage")
7979
plt.legend(loc="lower right")

examples/covariance/plot_robust_vs_empirical_covariance.py

Lines changed: 9 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -42,10 +42,11 @@
4242
4343
References
4444
----------
45-
.. [1] P. J. Rousseeuw. Least median of squares regression. J. Am
46-
Stat Ass, 79:871, 1984.
47-
.. [2] Johanna Hardin, David M Rocke. Journal of Computational and
48-
Graphical Statistics. December 1, 2005, 14(4): 928-946.
45+
.. [1] P. J. Rousseeuw. Least median of squares regression. Journal of American
46+
Statistical Ass., 79:871, 1984.
47+
.. [2] Johanna Hardin, David M Rocke. The distribution of robust distances.
48+
Journal of Computational and Graphical Statistics. December 1, 2005,
49+
14(4): 928-946.
4950
.. [3] Zoubir A., Koivunen V., Chakhchoukh Y. and Muma M. (2012). Robust
5051
estimation in signal processing: A tutorial-style treatment of
5152
fundamental concepts. IEEE Signal Processing Magazine 29(4), 61-80.
@@ -115,15 +116,16 @@
115116
# Display results
116117
font_prop = matplotlib.font_manager.FontProperties(size=11)
117118
plt.subplot(2, 1, 1)
119+
lw = 2
118120
plt.errorbar(range_n_outliers, err_loc_mcd.mean(1),
119121
yerr=err_loc_mcd.std(1) / np.sqrt(repeat),
120-
label="Robust location", color='m')
122+
label="Robust location", lw=lw, color='m')
121123
plt.errorbar(range_n_outliers, err_loc_emp_full.mean(1),
122124
yerr=err_loc_emp_full.std(1) / np.sqrt(repeat),
123-
label="Full data set mean", color='green')
125+
label="Full data set mean", lw=lw, color='green')
124126
plt.errorbar(range_n_outliers, err_loc_emp_pure.mean(1),
125127
yerr=err_loc_emp_pure.std(1) / np.sqrt(repeat),
126-
label="Pure data set mean", color='black')
128+
label="Pure data set mean", lw=lw, color='black')
127129
plt.title("Influence of outliers on the location estimation")
128130
plt.ylabel(r"Error ($||\mu - \hat{\mu}||_2^2$)")
129131
plt.legend(loc="upper left", prop=font_prop)

examples/gaussian_process/plot_compare_gpr_krr.py

Lines changed: 9 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -102,17 +102,20 @@
102102
% (time.time() - stime))
103103

104104
# Plot results
105-
plt.figure(figsize = (10,5))
105+
plt.figure(figsize=(10, 5))
106+
lw = 2
106107
plt.scatter(X, y, c='k', label='data')
107-
plt.plot(X_plot, np.sin(X_plot), c='k', label='True')
108-
plt.plot(X_plot, y_kr, c='g', label='KRR (%s)' % kr.best_params_)
109-
plt.plot(X_plot, y_gpr, c='r', label='GPR (%s)' % gpr.kernel_)
110-
plt.fill_between(X_plot[:, 0], y_gpr - y_std, y_gpr + y_std, color='r',
108+
plt.plot(X_plot, np.sin(X_plot), color='navy', lw=lw, label='True')
109+
plt.plot(X_plot, y_kr, color='turquoise', lw=lw,
110+
label='KRR (%s)' % kr.best_params_)
111+
plt.plot(X_plot, y_gpr, color='darkorange', lw=lw,
112+
label='GPR (%s)' % gpr.kernel_)
113+
plt.fill_between(X_plot[:, 0], y_gpr - y_std, y_gpr + y_std, color='darkorange',
111114
alpha=0.2)
112115
plt.xlabel('data')
113116
plt.ylabel('target')
114117
plt.xlim(0, 20)
115118
plt.ylim(-4, 4)
116119
plt.title('GPR versus Kernel Ridge')
117-
plt.legend(loc=9, prop={'size': 10})
120+
plt.legend(loc="best", scatterpoints=1, prop={'size': 8})
118121
plt.show()

examples/manifold/plot_mds.py

Lines changed: 8 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -61,23 +61,25 @@
6161
fig = plt.figure(1)
6262
ax = plt.axes([0., 0., 1., 1.])
6363

64-
plt.scatter(X_true[:, 0], X_true[:, 1], c='r', s=20)
65-
plt.scatter(pos[:, 0], pos[:, 1], s=20, c='g')
66-
plt.scatter(npos[:, 0], npos[:, 1], s=20, c='b')
67-
plt.legend(('True position', 'MDS', 'NMDS'), loc='best')
64+
s = 100
65+
plt.scatter(X_true[:, 0], X_true[:, 1], color='navy', s=s, lw=0,
66+
label='True Position')
67+
plt.scatter(pos[:, 0], pos[:, 1], color='turquoise', s=s, lw=0, label='MDS')
68+
plt.scatter(npos[:, 0], npos[:, 1], color='darkorange', s=s, lw=0, label='NMDS')
69+
plt.legend(scatterpoints=1, loc='best', shadow=False)
6870

6971
similarities = similarities.max() / similarities * 100
7072
similarities[np.isinf(similarities)] = 0
7173

7274
# Plot the edges
7375
start_idx, end_idx = np.where(pos)
74-
#a sequence of (*line0*, *line1*, *line2*), where::
76+
# a sequence of (*line0*, *line1*, *line2*), where::
7577
# linen = (x0, y0), (x1, y1), ... (xm, ym)
7678
segments = [[X_true[i, :], X_true[j, :]]
7779
for i in range(len(pos)) for j in range(len(pos))]
7880
values = np.abs(similarities)
7981
lc = LineCollection(segments,
80-
zorder=0, cmap=plt.cm.hot_r,
82+
zorder=0, cmap=plt.cm.Blues,
8183
norm=plt.Normalize(0, values.max()))
8284
lc.set_array(similarities.flatten())
8385
lc.set_linewidths(0.5 * np.ones(len(segments)))

examples/preprocessing/plot_function_transformer.py

Lines changed: 6 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -57,13 +57,16 @@ def drop_first_component(X, y):
5757

5858
if __name__ == '__main__':
5959
X, y = generate_dataset()
60-
plt.scatter(X[:, 0], X[:, 1], c=y, s=50)
61-
plt.show()
60+
lw = 0
61+
plt.figure()
62+
plt.scatter(X[:, 0], X[:, 1], c=y, lw=lw)
63+
plt.figure()
6264
X_transformed, y_transformed = drop_first_component(*generate_dataset())
6365
plt.scatter(
6466
X_transformed[:, 0],
6567
np.zeros(len(X_transformed)),
6668
c=y_transformed,
67-
s=50,
69+
lw=lw,
70+
s=60
6871
)
6972
plt.show()

examples/semi_supervised/plot_label_propagation_structure.py

Lines changed: 13 additions & 13 deletions
Original file line numberDiff line numberDiff line change
@@ -38,24 +38,24 @@
3838
output_labels = label_spread.transduction_
3939
plt.figure(figsize=(8.5, 4))
4040
plt.subplot(1, 2, 1)
41-
plot_outer_labeled, = plt.plot(X[labels == outer, 0],
42-
X[labels == outer, 1], 'rs')
43-
plot_unlabeled, = plt.plot(X[labels == -1, 0], X[labels == -1, 1], 'g.')
44-
plot_inner_labeled, = plt.plot(X[labels == inner, 0],
45-
X[labels == inner, 1], 'bs')
46-
plt.legend((plot_outer_labeled, plot_inner_labeled, plot_unlabeled),
47-
('Outer Labeled', 'Inner Labeled', 'Unlabeled'), 'upper left',
48-
numpoints=1, shadow=False)
49-
plt.title("Raw data (2 classes=red and blue)")
41+
plt.scatter(X[labels == outer, 0], X[labels == outer, 1], color='navy',
42+
marker='s', lw=0, label="outer labeled", s=10)
43+
plt.scatter(X[labels == inner, 0], X[labels == inner, 1], color='c',
44+
marker='s', lw=0, label='outer labeled', s=10)
45+
plt.scatter(X[labels == -1, 0], X[labels == -1, 1], color='darkorange',
46+
marker='.', label='unlabeled')
47+
plt.legend(scatterpoints=1, shadow=False, loc='upper right')
48+
plt.title("Raw data (2 classes=outer and inner)")
5049

5150
plt.subplot(1, 2, 2)
5251
output_label_array = np.asarray(output_labels)
5352
outer_numbers = np.where(output_label_array == outer)[0]
5453
inner_numbers = np.where(output_label_array == inner)[0]
55-
plot_outer, = plt.plot(X[outer_numbers, 0], X[outer_numbers, 1], 'rs')
56-
plot_inner, = plt.plot(X[inner_numbers, 0], X[inner_numbers, 1], 'bs')
57-
plt.legend((plot_outer, plot_inner), ('Outer Learned', 'Inner Learned'),
58-
'upper left', numpoints=1, shadow=False)
54+
plt.scatter(X[outer_numbers, 0], X[outer_numbers, 1], color='navy',
55+
marker='s', lw=0, s=10, label="outer learned")
56+
plt.scatter(X[inner_numbers, 0], X[inner_numbers, 1], color='c',
57+
marker='s', lw=0, s=10, label="inner learned")
58+
plt.legend(scatterpoints=1, shadow=False, loc='uppper right')
5959
plt.title("Labels learned with Label Spreading (KNN)")
6060

6161
plt.subplots_adjust(left=0.07, bottom=0.07, right=0.93, top=0.92)

0 commit comments

Comments (0)