@@ -39,8 +39,8 @@
 
 
 # Utility function to report best scores
-def report(cv_scores, n_top=3):
-    top_scores = sorted(cv_scores, key=itemgetter(1), reverse=True)[:n_top]
+def report(grid_scores, n_top=3):
+    top_scores = sorted(grid_scores, key=itemgetter(1), reverse=True)[:n_top]
     for i, score in enumerate(top_scores):
         print("Model with rank: {0}".format(i + 1))
         print("Mean validation score: {0:.3f} (std: {1:.3f})".format(
@@ -67,7 +67,7 @@ def report(cv_scores, n_top=3):
 random_search.fit(X, y)
 print("RandomizedSearchCV took %.2f seconds for %d candidates"
       " parameter settings." % ((time() - start), n_iter_search))
-report(random_search.cv_scores_)
+report(random_search.grid_scores_)
 
 # use a full grid over all parameters
 param_grid = {"max_depth": [3, None],
@@ -82,5 +82,5 @@ def report(cv_scores, n_top=3):
 grid_search.fit(X, y)
 
 print("GridSearchCV took %.2f seconds for %d candidate parameter settings."
-      % (time() - start, len(grid_search.cv_scores_)))
-report(grid_search.cv_scores_)
+      % (time() - start, len(grid_search.grid_scores_)))
+report(grid_search.grid_scores_)
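
The `itemgetter(1)` sort in `report` works because each entry in `grid_scores_` carries the mean validation score in its second field, matching the `(parameters, mean_validation_score, cv_validation_scores)` tuples that scikit-learn's search estimators exposed under that attribute at the time of this commit. A minimal, self-contained sketch of that contract, using hypothetical stand-in data rather than a fitted estimator:

```python
from operator import itemgetter

# Hypothetical stand-ins for the (parameters, mean_validation_score,
# cv_validation_scores) tuples that grid_scores_ held in this era of
# scikit-learn; the values below are illustrative only.
grid_scores = [
    ({"max_depth": 3}, 0.81, [0.80, 0.82, 0.81]),
    ({"max_depth": None}, 0.84, [0.83, 0.85, 0.84]),
    ({"max_depth": 5}, 0.79, [0.78, 0.80, 0.79]),
]

def report(grid_scores, n_top=3):
    # itemgetter(1) ranks candidates by their mean validation score.
    top_scores = sorted(grid_scores, key=itemgetter(1), reverse=True)[:n_top]
    for i, score in enumerate(top_scores):
        print("Model with rank: {0}".format(i + 1))
        print("Mean validation score: {0:.3f}".format(score[1]))
        print("Parameters: {0}".format(score[0]))

report(grid_scores, n_top=2)
```

Ranked this way, the `max_depth=None` candidate (mean 0.84) prints first, which is exactly how the example surfaces the best settings found by `RandomizedSearchCV` and `GridSearchCV`.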