4.2) AlexNet
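The printout omits the setup cell. A minimal sketch of the assumed imports and CIFAR-10 load (the variable names match those used below):

# Assumed setup, not shown in the printout.
import tensorflow as tf
import matplotlib.pyplot as plt

(train_images, train_labels), (test_images, test_labels) = \
    tf.keras.datasets.cifar10.load_data()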
CLASS_NAMES = ['airplane', 'automobile', 'bird', 'cat', 'deer', 'dog', 'frog', 'horse', 'ship', 'truck']
train_ds=tf.data.Dataset.from_tensor_slices((train_images,train_labels))
test_ds=tf.data.Dataset.from_tensor_slices((test_images,test_labels))
plt.figure(figsize=(30,30))
for i, (image, label) in enumerate(train_ds.take(20)):
    ax = plt.subplot(5, 5, i+1)
    plt.imshow(image)
    plt.title(CLASS_NAMES[label.numpy()[0]])
    plt.axis('off')
def process_image(image, label):
    # Standardize each image to zero mean / unit variance, then upscale to 64x64.
    image = tf.image.per_image_standardization(image)
    image = tf.image.resize(image, (64, 64))
    return image, label
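A quick sanity check of the preprocessing (the input values here are arbitrary; only the shapes matter):

img, lbl = process_image(tf.zeros([32, 32, 3]), 0)
print(img.shape)  # (64, 64, 3)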
train_ds_size=tf.data.experimental.cardinality(train_ds).numpy()
test_ds_size=tf.data.experimental.cardinality(test_ds).numpy()
print('Train size:',train_ds_size)
print('Test size:',test_ds_size)
train_ds=(train_ds
.map(process_image)
.shuffle(buffer_size=train_ds_size)
.batch(batch_size=32,drop_remainder=True)
)
test_ds=(test_ds
.map(process_image)
.shuffle(buffer_size=test_ds_size)
.batch(batch_size=32,drop_remainder=True)
)
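After mapping, shuffling, and batching, each pipeline element is a (images, labels) pair; inspecting element_spec confirms the batched shapes (drop_remainder=True makes the batch dimension a fixed 32):

print(train_ds.element_spec)
# Expected: images of shape (32, 64, 64, 3) and labels of shape (32, 1)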
# The Dropout layer randomly sets input units to 0 with a frequency of `rate` at each step during training.
# Illustration of the mechanism with MXNet NDArrays (separate from the Keras model below):
from mxnet import nd

def drop_out(X, rate):
    keep_probability = 1 - rate
    # Bernoulli mask: keep each unit independently with probability `keep_probability`.
    mask = nd.random.uniform(0, 1.0, X.shape, ctx=X.context) < keep_probability
    if keep_probability > 0.0:
        # Inverted dropout: scale the kept units so the expected activation is unchanged.
        scale = 1 / keep_probability
    else:
        scale = 0.0
    return mask * X * scale
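A quick check of the function (the input values are arbitrary): with rate=0.5, roughly half the entries are zeroed and the survivors are doubled.

X = nd.arange(16).reshape((2, 8))
print(drop_out(X, 0.0))   # identity: every unit kept, scale 1
print(drop_out(X, 0.5))   # ~half the units zeroed, survivors scaled by 2
print(drop_out(X, 1.0))   # all zeros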
image_shape=(64,64,3)
model = tf.keras.Sequential()

# 1st Convolutional Layer
model.add(tf.keras.layers.Conv2D(filters=96, input_shape=image_shape, kernel_size=(11,11),\
strides=(4,4), padding='valid'))
model.add(tf.keras.layers.Activation('relu'))
# Pooling
model.add(tf.keras.layers.MaxPooling2D(pool_size=(2,2), strides=(2,2), padding='valid'))
# Batch Normalisation before passing it to the next layer
model.add(tf.keras.layers.BatchNormalization())
# 2nd Convolutional Layer
model.add(tf.keras.layers.Conv2D(filters=256, kernel_size=(5,5), strides=(1,1), padding='same'))
model.add(tf.keras.layers.Activation('relu'))
# Pooling
model.add(tf.keras.layers.MaxPooling2D(pool_size=(2,2), strides=(2,2), padding='valid'))
# Batch Normalisation
model.add(tf.keras.layers.BatchNormalization())
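The printout cuts off the model definition here, but the summary below lists the remaining layers. A sketch of the rest of the model, reconstructed from the output shapes and parameter counts in that summary (the dropout rate and the final 10-way softmax layer are assumptions, since they are not visible in the printout):

# 3rd Convolutional Layer
model.add(tf.keras.layers.Conv2D(filters=384, kernel_size=(3,3), strides=(1,1), padding='same'))
model.add(tf.keras.layers.Activation('relu'))
model.add(tf.keras.layers.BatchNormalization())
# 4th Convolutional Layer
model.add(tf.keras.layers.Conv2D(filters=384, kernel_size=(3,3), strides=(1,1), padding='same'))
model.add(tf.keras.layers.Activation('relu'))
model.add(tf.keras.layers.BatchNormalization())
# 5th Convolutional Layer
model.add(tf.keras.layers.Conv2D(filters=256, kernel_size=(3,3), strides=(1,1), padding='same'))
model.add(tf.keras.layers.Activation('relu'))
model.add(tf.keras.layers.MaxPooling2D(pool_size=(2,2), strides=(2,2), padding='valid'))
model.add(tf.keras.layers.BatchNormalization())
# Fully connected head
model.add(tf.keras.layers.Flatten())
model.add(tf.keras.layers.Dense(4096))
model.add(tf.keras.layers.Activation('relu'))
model.add(tf.keras.layers.Dropout(0.4))   # rate assumed; not visible in the printout
model.add(tf.keras.layers.BatchNormalization())
model.add(tf.keras.layers.Dense(4096))
model.add(tf.keras.layers.Activation('relu'))
model.add(tf.keras.layers.Dropout(0.4))   # rate assumed
model.add(tf.keras.layers.BatchNormalization())
# Output layer (assumed: 10 CIFAR-10 classes with softmax, matching sparse_categorical_crossentropy)
model.add(tf.keras.layers.Dense(10, activation='softmax'))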
model.compile(
loss='sparse_categorical_crossentropy',
optimizer=tf.optimizers.SGD(learning_rate=0.001),
metrics=['accuracy']
)
model.summary()
_________________________________________________________________
Layer (type)                            Output Shape        Param #
=================================================================
... (earlier layers truncated in the printout) ...
conv2d_11 (Conv2D)                      (None, 7, 7, 256)   614656
activation_19 (Activation)              (None, 7, 7, 256)   0
max_pooling2d_7 (MaxPooling2D)          (None, 3, 3, 256)   0
batch_normalization_17 (BatchNorm.)     (None, 3, 3, 256)   1024
conv2d_12 (Conv2D)                      (None, 3, 3, 384)   885120
activation_20 (Activation)              (None, 3, 3, 384)   0
batch_normalization_18 (BatchNorm.)     (None, 3, 3, 384)   1536
conv2d_13 (Conv2D)                      (None, 3, 3, 384)   1327488
activation_21 (Activation)              (None, 3, 3, 384)   0
batch_normalization_19 (BatchNorm.)     (None, 3, 3, 384)   1536
conv2d_14 (Conv2D)                      (None, 3, 3, 256)   884992
activation_22 (Activation)              (None, 3, 3, 256)   0
max_pooling2d_8 (MaxPooling2D)          (None, 1, 1, 256)   0
batch_normalization_20 (BatchNorm.)     (None, 1, 1, 256)   1024
flatten_2 (Flatten)                     (None, 256)         0
dense_6 (Dense)                         (None, 4096)        1052672
activation_23 (Activation)              (None, 4096)        0
dropout_6 (Dropout)                     (None, 4096)        0
batch_normalization_21 (BatchNorm.)     (None, 4096)        16384
dense_7 (Dense)                         (None, 4096)        16781312
activation_24 (Activation)              (None, 4096)        0
dropout_7 (Dropout)                     (None, 4096)        0
batch_normalization_22 (BatchNorm.)     (None, 4096)        16384
... (remaining rows and parameter totals truncated in the printout) ...
_________________________________________________________________
history=model.fit(
train_ds,
epochs=25,
validation_data=test_ds,
validation_freq=1
)
Epoch  1/25 - 1059s 672ms/step - loss: 7.6388
Epoch  2/25 - 1061s 674ms/step - loss: 7.0669
Epoch  3/25 - 1061s 675ms/step - loss: 6.7147
Epoch  4/25 - 1067s 678ms/step - loss: 6.4048
Epoch  5/25 - 1068s 678ms/step - loss: 6.1540
Epoch  6/25 - 1072s 682ms/step - loss: 5.8836
Epoch  7/25 - 1074s 682ms/step - loss: 5.6238
Epoch  8/25 - 1074s 682ms/step - loss: 5.4209
Epoch  9/25 - 1076s 684ms/step - loss: 5.2264
Epoch 10/25 - 1081s 686ms/step - loss: 5.0536
Epoch 11/25 - 1065s 677ms/step - loss: 4.8800
Epoch 12/25 - 1066s 678ms/step - loss: 4.7012
Epoch 13/25 - 1078s 685ms/step - loss: 4.5094
Epoch 14/25 - 1069s 679ms/step - loss: 4.3561
Epoch 15/25 - 1068s 679ms/step - loss: 4.1762
Epoch 16/25 - 1082s 688ms/step - loss: 4.0805
Epoch 17/25 - 1090s 692ms/step - loss: 3.9502
Epoch 18/25 - 1077s 684ms/step - loss: 3.9702
Epoch 19/25 - 1080s 686ms/step - loss: 3.7835
Epoch 20/25 - 1089s 692ms/step - loss: 3.6462
Epoch 21/25 - 1086s 691ms/step - loss: 3.5730
Epoch 22/25 - 1085s 689ms/step - loss: 3.5225
Epoch 23/25 - 1074s 682ms/step - loss: 3.4097
Epoch 24/25 - 1065s 677ms/step - loss: 3.3171
Epoch 25/25 - 1075s 683ms/step - loss: 3.2388
(1562 steps per epoch; accuracy values truncated in the printout)
model.history.history.keys()
f, ax = plt.subplots(2, 1, figsize=(10, 10))
# First subplot: training loss vs. validation loss
ax[0].plot(model.history.history['loss'], color='b', label='Training Loss')
ax[0].plot(model.history.history['val_loss'], color='r', label='Validation Loss')
ax[0].legend()
[Figure: training and validation loss curves over the 25 epochs]
4.2.1) Dropout
Horizontal Flip
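The pages with this subsection's code are cut off in the printout. A minimal sketch of what a horizontal-flip augmentation step typically looks like with tf.image (the function name and its placement in the pipeline are assumptions):

def augment(image, label):
    # Randomly mirror the image left-right with probability 0.5 (training data only).
    image = tf.image.random_flip_left_right(image)
    return image, label

# Applied before the existing preprocessing, e.g.:
# train_ds = train_ds.map(augment).map(process_image)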