@@ -215,8 +215,8 @@ elements in the output array must range from 0 to 4 inclusive.
215
215
Generally, the class labels start from ``0`` to ``N-1`` where ``N`` is
216
216
the number of classes.
217
217
218
- Note that the project only supports classification problems where each
219
- sample is assigned to only one class.
218
+ Note that the project only supports that each sample is assigned to only
219
+ one class.
220
220
221
221
.. _header-n89:
222
222
@@ -396,11 +396,10 @@ predicting the outputs based on the current solution's
396
396
attribute is updated by weights evolved by the genetic algorithm after
397
397
each generation.
398
398
399
- PyGAD 2.0.0 and higher has a new parameter accepted by the ``pygad.GA ``
400
- class constructor named ``callback_generation ``. It could be assigned to
401
- a function that is called after each generation. The function must
402
- accept a single parameter representing the instance of the ``pygad.GA ``
403
- class.
399
+ PyGAD has a parameter accepted by the ``pygad.GA `` class constructor
400
+ named ``on_generation ``. It could be assigned to a function that is
401
+ called after each generation. The function must accept a single
402
+ parameter representing the instance of the ``pygad.GA `` class.
404
403
405
404
This callback function can be used to update the ``trained_weights ``
406
405
attribute of layers of each network in the population.
@@ -470,7 +469,7 @@ number of generations is 10.
470
469
crossover_type = crossover_type,
471
470
mutation_type = mutation_type,
472
471
keep_parents = keep_parents,
473
- callback_generation = callback_generation)
472
+ on_generation = callback_generation)
474
473
475
474
The last step for training the neural networks using the genetic
476
475
algorithm is calling the ``run() `` method.
@@ -618,12 +617,13 @@ complete code is listed below.
618
617
def callback_generation (ga_instance ):
619
618
global GACNN_instance , last_fitness
620
619
621
- population_matrices = gacnn.population_as_matrices(population_networks = GACNN_instance .population_networks,
620
+ population_matrices = pygad.gacnn.population_as_matrices(population_networks = GACNN_instance.population_networks,
622
621
population_vectors = ga_instance.population)
623
622
624
623
GACNN_instance .update_population_trained_weights(population_trained_weights = population_matrices)
625
624
626
625
print (" Generation = {generation} " .format(generation = ga_instance.generations_completed))
626
+ print (" Fitness = {fitness} " .format(fitness = ga_instance.best_solutions_fitness))
627
627
628
628
data_inputs = numpy.load(" dataset_inputs.npy" )
629
629
data_outputs = numpy.load(" dataset_outputs.npy" )
@@ -634,35 +634,35 @@ complete code is listed below.
634
634
data_inputs = data_inputs
635
635
data_outputs = data_outputs
636
636
637
- input_layer = cnn.Input2D(input_shape = sample_shape)
638
- conv_layer1 = cnn.Conv2D(num_filters = 2 ,
639
- kernel_size = 3 ,
640
- previous_layer = input_layer,
641
- activation_function = " relu" )
642
- average_pooling_layer = cnn.AveragePooling2D(pool_size = 5 ,
643
- previous_layer = conv_layer1,
644
- stride = 3 )
637
+ input_layer = pygad.cnn.Input2D(input_shape = sample_shape)
638
+ conv_layer1 = pygad.cnn.Conv2D(num_filters = 2 ,
639
+ kernel_size = 3 ,
640
+ previous_layer = input_layer,
641
+ activation_function = " relu" )
642
+ average_pooling_layer = pygad.cnn.AveragePooling2D(pool_size = 5 ,
643
+ previous_layer = conv_layer1,
644
+ stride = 3 )
645
645
646
- flatten_layer = cnn.Flatten(previous_layer = average_pooling_layer)
647
- dense_layer2 = cnn.Dense(num_neurons = num_classes,
648
- previous_layer = flatten_layer,
649
- activation_function = " softmax" )
646
+ flatten_layer = pygad.cnn.Flatten(previous_layer = average_pooling_layer)
647
+ dense_layer2 = pygad.cnn.Dense(num_neurons = num_classes,
648
+ previous_layer = flatten_layer,
649
+ activation_function = " softmax" )
650
650
651
- model = cnn.Model(last_layer = dense_layer2,
652
- epochs = 1 ,
653
- learning_rate = 0.01 )
651
+ model = pygad.cnn.Model(last_layer = dense_layer2,
652
+ epochs = 1 ,
653
+ learning_rate = 0.01 )
654
654
655
655
model.summary()
656
656
657
657
658
- GACNN_instance = gacnn.GACNN(model = model,
658
+ GACNN_instance = pygad.gacnn.GACNN(model = model,
659
659
num_solutions = 4 )
660
660
661
661
# GACNN_instance.update_population_trained_weights(population_trained_weights=population_matrices)
662
662
663
663
# population does not hold the numerical weights of the network instead it holds a list of references to each last layer of each network (i.e. solution) in the population. A solution or a network can be used interchangeably.
664
664
# If there is a population with 3 solutions (i.e. networks), then the population is a list with 3 elements. Each element is a reference to the last layer of each network. Using such a reference, all details of the network can be accessed.
665
- population_vectors = gacnn.population_as_vectors(population_networks = GACNN_instance .population_networks)
665
+ population_vectors = pygad.gacnn.population_as_vectors(population_networks = GACNN_instance.population_networks)
666
666
667
667
# To prepare the initial population, there are 2 ways:
668
668
# 1) Prepare it yourself and pass it to the initial_population parameter. This way is useful when the user wants to start the genetic algorithm with a custom initial population.
@@ -692,7 +692,7 @@ complete code is listed below.
692
692
crossover_type = crossover_type,
693
693
mutation_type = mutation_type,
694
694
keep_parents = keep_parents,
695
- callback_generation = callback_generation)
695
+ on_generation = callback_generation)
696
696
697
697
ga_instance.run()
698
698
0 commit comments