diff --git a/examples/release_highlights/plot_release_highlights_0_22_0.py b/examples/release_highlights/plot_release_highlights_0_22_0.py
index 0d53dea9f7640..3e01cebbc60fd 100644
--- a/examples/release_highlights/plot_release_highlights_0_22_0.py
+++ b/examples/release_highlights/plot_release_highlights_0_22_0.py
@@ -100,10 +100,15 @@
 # The :func:`inspection.permutation_importance` can be used to get an
 # estimate of the importance of each feature, for any fitted estimator:
 
+import numpy as np
+import matplotlib.pyplot as plt
+from sklearn.datasets import make_classification
 from sklearn.ensemble import RandomForestClassifier
 from sklearn.inspection import permutation_importance
 
 X, y = make_classification(random_state=0, n_features=5, n_informative=3)
+feature_names = np.array([f'x_{i}' for i in range(X.shape[1])])
+
 rf = RandomForestClassifier(random_state=0).fit(X, y)
 result = permutation_importance(rf, X, y, n_repeats=10,
                                 random_state=0, n_jobs=-1)
@@ -111,7 +116,7 @@
 fig, ax = plt.subplots()
 sorted_idx = result.importances_mean.argsort()
 ax.boxplot(result.importances[sorted_idx].T,
-           vert=False, labels=range(X.shape[1]))
+           vert=False, labels=feature_names[sorted_idx])
 ax.set_title("Permutation Importance of each feature")
 ax.set_ylabel("Features")
 fig.tight_layout()
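
For reference, below is a minimal standalone sketch of the example as patched. The label change matters because the boxes are reordered by sorted_idx: the old range(X.shape[1]) labels stayed in 0..4 order and so no longer named the boxes next to them, whereas indexing feature_names[sorted_idx] keeps labels and boxes in sync. Everything here mirrors the post-patch code; the only additions are descriptive comments and a trailing plt.show(), assumed here so the figure renders when the sketch is run as a plain script rather than through sphinx-gallery.

import numpy as np
import matplotlib.pyplot as plt
from sklearn.datasets import make_classification
from sklearn.ensemble import RandomForestClassifier
from sklearn.inspection import permutation_importance

# Synthetic dataset: 5 features, of which only 3 carry signal.
X, y = make_classification(random_state=0, n_features=5, n_informative=3)
feature_names = np.array([f'x_{i}' for i in range(X.shape[1])])

rf = RandomForestClassifier(random_state=0).fit(X, y)

# Shuffle each feature n_repeats times and record the drop in score;
# this works for any fitted estimator, not just tree ensembles.
result = permutation_importance(rf, X, y, n_repeats=10,
                                random_state=0, n_jobs=-1)

# Horizontal box plot of the per-repeat importances, least important
# feature at the bottom, each box labelled with its own feature name.
fig, ax = plt.subplots()
sorted_idx = result.importances_mean.argsort()
ax.boxplot(result.importances[sorted_idx].T,
           vert=False, labels=feature_names[sorted_idx])
ax.set_title("Permutation Importance of each feature")
ax.set_ylabel("Features")
fig.tight_layout()
plt.show()  # assumed addition: display the figure outside sphinx-gallery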