C1W3: Improve MNIST with Convolutions
import tensorflow as tf
from tensorflow.keras import layers, losses
MNIST dataset: 60,000 28x28 grayscale training images of the 10 digits (plus a 10,000-image test split), loaded with tf.keras.datasets.mnist.load_data.
(x_train, y_train), _ = tf.keras.datasets.mnist.load_data()
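As loaded, x_train is a (60000, 28, 28) uint8 array with pixel values in [0, 255]; the channel axis and rescaling are handled inside the model below. A quick, optional sanity check (not part of the original assignment):

print(x_train.shape, x_train.dtype)  # (60000, 28, 28) uint8
print(y_train.shape, y_train.dtype)  # (60000,) uint8
print(x_train.min(), x_train.max())  # 0 255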
class myCallback(tf.keras.callbacks.Callback):
    def on_epoch_end(self, epoch, logs=None):
        # Stop training once training accuracy exceeds 99.5%
        if logs is not None and logs.get('accuracy', 0) > 0.995:
            print('\nReached 99.5% accuracy so cancelling training!')
            self.model.stop_training = True
def convolutional_model():
    model = tf.keras.Sequential([
        # Add a channel dimension: (28, 28) -> (28, 28, 1)
        layers.Lambda(lambda x: tf.expand_dims(x, axis=-1), input_shape=(28, 28)),
        # Scale pixel values from [0, 255] to [0, 1]
        layers.Rescaling(1/255),
        layers.Conv2D(32, 3, activation='relu'),
        layers.MaxPooling2D(),
        layers.Flatten(),
        layers.Dense(128, activation='relu'),
        layers.Dense(10)])  # logits for the 10 digit classes

    model.compile(optimizer='adam',
                  loss=losses.SparseCategoricalCrossentropy(from_logits=True),
                  metrics=['accuracy'])
    return model
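To verify that the Lambda layer adds the channel axis and the downstream shapes line up, the model can be built and inspected with model.summary(); the expected per-layer output shapes (a sketch, assuming the default 'valid' padding and 2x2 pooling window) are:

# convolutional_model().summary() should report output shapes along these lines:
#   Lambda        -> (None, 28, 28, 1)
#   Rescaling     -> (None, 28, 28, 1)
#   Conv2D        -> (None, 26, 26, 32)
#   MaxPooling2D  -> (None, 13, 13, 32)
#   Flatten       -> (None, 5408)
#   Dense         -> (None, 128)
#   Dense         -> (None, 10)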
model = convolutional_model()
callbacks = myCallback()
history = model.fit(x_train, y_train, epochs=10, callbacks=[callbacks])
Epoch 1/10
1875/1875 [==============================] - 12s 5ms/step - loss: 0.1582 - accuracy: 0.9529
Epoch 2/10
1875/1875 [==============================] - 10s 6ms/step - loss: 0.0561 - accuracy: 0.9833
Epoch 3/10
1875/1875 [==============================] - 10s 5ms/step - loss: 0.0367 - accuracy: 0.9883
Epoch 4/10
1875/1875 [==============================] - 11s 6ms/step - loss: 0.0249 - accuracy: 0.9923
Epoch 5/10
1875/1875 [==============================] - 10s 6ms/step - loss: 0.0182 - accuracy: 0.9942
Epoch 6/10
1873/1875 [============================>.] - ETA: 0s - loss: 0.0124 - accuracy: 0.9959
Reached 99.5% accuracy so cancelling training!
1875/1875 [==============================] - 11s 6ms/step - loss: 0.0124 - accuracy: 0.9959
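The callback fires in epoch 6 once training accuracy exceeds 99.5% and halts the remaining epochs. As an optional follow-up (hypothetical, not part of the assignment), the model could be checked against the test split that was discarded above; since the final Dense(10) layer outputs logits (from_logits=True), predicted digits are read off with argmax:

# Hypothetical evaluation on the held-out test split (discarded as `_` above)
import numpy as np

(_, _), (x_test, y_test) = tf.keras.datasets.mnist.load_data()
test_loss, test_acc = model.evaluate(x_test, y_test, verbose=0)
print(f'Test accuracy: {test_acc:.4f}')

# Predict a single digit from its logits
logits = model.predict(x_test[:1], verbose=0)
print('Predicted:', np.argmax(logits, axis=-1)[0], '| true label:', y_test[0])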