Modified National Institute of Standards and Technology (MNIST) Data: Multi-Layer Perceptron (MLP)

In [1]:
from __future__ import print_function
import numpy as np
from keras.datasets import mnist
from keras.models import Sequential
from keras.layers.core import Dense, Activation
from keras.optimizers import SGD
from keras.utils import np_utils

np.random.seed(1671)  # for reproducibility
Using CNTK backend
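This notebook uses the standalone Keras 2 API running on the CNTK backend, which is where the keras.layers.core and np_utils import paths come from. As a rough sketch (not what this notebook ran), the equivalent imports on a current tensorflow.keras install would look like this:

# Approximate tf.keras equivalents of the imports above (a sketch, untested here)
import numpy as np
from tensorflow.keras.datasets import mnist
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Dense, Activation
from tensorflow.keras.optimizers import SGD
from tensorflow.keras.utils import to_categorical  # replaces np_utils.to_categorical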
In [2]:
# network and training
NB_EPOCH = 20
BATCH_SIZE = 128
VERBOSE = 1
NB_CLASSES = 10   # number of outputs = number of digits
OPTIMIZER = SGD() # optimizer, explained later in this chapter
N_HIDDEN = 128
VALIDATION_SPLIT = 0.2  # fraction of TRAIN reserved for VALIDATION
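SGD() is plain stochastic gradient descent with this Keras version's default learning rate of 0.01. The update it applies to every weight is simple; here is a minimal NumPy sketch of one step, using made-up values for illustration:

# Minimal sketch of one vanilla SGD step (illustrative, not Keras internals)
import numpy as np

def sgd_step(w, grad, lr=0.01):
    """Move the weights a small step against the gradient of the loss."""
    return w - lr * grad

w = np.array([0.5, -0.3])
grad = np.array([0.2, -0.1])  # pretend gradient from one mini-batch
w = sgd_step(w, grad)         # -> [0.498, -0.299]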
In [3]:
# data: shuffled and split between train and test sets
(X_train, y_train), (X_test, y_test) = mnist.load_data()

# X_train is 60000 rows of 28x28 values --> reshaped to 60000 x 784
RESHAPED = 784
#
X_train = X_train.reshape(60000, RESHAPED)
X_test = X_test.reshape(10000, RESHAPED)
X_train = X_train.astype('float32')
X_test = X_test.astype('float32')
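The reshape flattens each 28x28 image into a 784-dimensional row vector, because the Dense layers below expect vectors, not 2-D grids. A quick sanity check, assuming the cell above has run:

# Sanity check: shapes after flattening (assumes the cell above has run)
assert X_train.shape == (60000, RESHAPED)
assert X_test.shape == (10000, RESHAPED)
# a single image can be recovered by undoing the flatten:
first_image = X_train[0].reshape(28, 28)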
In [4]:
# normalize pixel values from [0, 255] to [0, 1]
X_train /= 255
X_test /= 255
print(X_train.shape[0], 'train samples')
print(X_test.shape[0], 'test samples')

# convert class vectors to binary class matrices
Y_train = np_utils.to_categorical(y_train, NB_CLASSES)
Y_test = np_utils.to_categorical(y_test, NB_CLASSES)
60000 train samples
10000 test samples
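to_categorical turns each integer label into a one-hot row: a 10-dimensional vector with a 1 in the position of the digit, matching the 10-way softmax output below. A minimal NumPy equivalent, for illustration only:

# One-hot encoding by hand, equivalent to np_utils.to_categorical (illustrative)
import numpy as np
y = np.array([5, 0, 4])   # example digit labels
one_hot = np.eye(10)[y]   # row i is all zeros except a 1 at position y[i]
# one_hot[0] == [0, 0, 0, 0, 0, 1, 0, 0, 0, 0]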
In [5]:
# two hidden layers with N_HIDDEN units each
# 10 outputs
# final stage is softmax

model = Sequential()
model.add(Dense(N_HIDDEN, input_shape=(RESHAPED,)))
model.add(Activation('relu'))
model.add(Dense(N_HIDDEN))
model.add(Activation('relu'))
model.add(Dense(NB_CLASSES))
model.add(Activation('softmax'))
model.summary()
# (784 + 1) * 128 = 100,480 parameters
# (128 + 1) * 128 =  16,512 parameters
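# (128 + 1) * 10  =   1,290 parameters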

model.compile(loss='categorical_crossentropy',
              optimizer=OPTIMIZER,
              metrics=['accuracy'])

history = model.fit(X_train, Y_train,
                    batch_size=BATCH_SIZE, epochs=NB_EPOCH,
                    verbose=VERBOSE, validation_split=VALIDATION_SPLIT)

score = model.evaluate(X_test, Y_test, verbose=VERBOSE)
print("\nTest score:", score[0])
print('Test accuracy:', score[1])
_________________________________________________________________
Layer (type)                 Output Shape              Param #   
=================================================================
dense_1 (Dense)              (None, 128)               100480    
_________________________________________________________________
activation_1 (Activation)    (None, 128)               0         
_________________________________________________________________
dense_2 (Dense)              (None, 128)               16512     
_________________________________________________________________
activation_2 (Activation)    (None, 128)               0         
_________________________________________________________________
dense_3 (Dense)              (None, 10)                1290      
_________________________________________________________________
activation_3 (Activation)    (None, 10)                0         
=================================================================
Total params: 118,282
Trainable params: 118,282
Non-trainable params: 0
_________________________________________________________________
Train on 48000 samples, validate on 12000 samples
Epoch 1/20
 6400/48000 [===>..........................] - ETA: 1s - loss: 2.1968 - acc: 0.2236
/home/dadebarr/anaconda3/lib/python3.5/site-packages/cntk/core.py:351: UserWarning: your data is of type "float64", but your input variable (uid "Input61") expects "<class 'numpy.float32'>". Please convert your data beforehand to speed up training.
  (sample.dtype, var.uid, str(var.dtype)))
48000/48000 [==============================] - 1s - loss: 1.4225 - acc: 0.6404 - val_loss: 0.7217 - val_acc: 0.8492
Epoch 2/20
48000/48000 [==============================] - 1s - loss: 0.5798 - acc: 0.8572 - val_loss: 0.4419 - val_acc: 0.8843
Epoch 3/20
48000/48000 [==============================] - 1s - loss: 0.4294 - acc: 0.8833 - val_loss: 0.3667 - val_acc: 0.8975
Epoch 4/20
48000/48000 [==============================] - 1s - loss: 0.3731 - acc: 0.8955 - val_loss: 0.3308 - val_acc: 0.9051
Epoch 5/20
48000/48000 [==============================] - 1s - loss: 0.3412 - acc: 0.9031 - val_loss: 0.3075 - val_acc: 0.9110
Epoch 6/20
48000/48000 [==============================] - 1s - loss: 0.3187 - acc: 0.9086 - val_loss: 0.2907 - val_acc: 0.9161
Epoch 7/20
48000/48000 [==============================] - 1s - loss: 0.3014 - acc: 0.9136 - val_loss: 0.2781 - val_acc: 0.9203
Epoch 8/20
48000/48000 [==============================] - 1s - loss: 0.2864 - acc: 0.9185 - val_loss: 0.2669 - val_acc: 0.9221
Epoch 9/20
48000/48000 [==============================] - 1s - loss: 0.2740 - acc: 0.9220 - val_loss: 0.2574 - val_acc: 0.9247
Epoch 10/20
48000/48000 [==============================] - 1s - loss: 0.2626 - acc: 0.9252 - val_loss: 0.2488 - val_acc: 0.9288
Epoch 11/20
48000/48000 [==============================] - 1s - loss: 0.2526 - acc: 0.9280 - val_loss: 0.2402 - val_acc: 0.9312
Epoch 12/20
48000/48000 [==============================] - 1s - loss: 0.2433 - acc: 0.9305 - val_loss: 0.2350 - val_acc: 0.9317
Epoch 13/20
48000/48000 [==============================] - 1s - loss: 0.2350 - acc: 0.9327 - val_loss: 0.2263 - val_acc: 0.9361
Epoch 14/20
48000/48000 [==============================] - 1s - loss: 0.2271 - acc: 0.9348 - val_loss: 0.2215 - val_acc: 0.9390
Epoch 15/20
48000/48000 [==============================] - 1s - loss: 0.2199 - acc: 0.9375 - val_loss: 0.2159 - val_acc: 0.9398
Epoch 16/20
48000/48000 [==============================] - 1s - loss: 0.2132 - acc: 0.9390 - val_loss: 0.2112 - val_acc: 0.9416
Epoch 17/20
48000/48000 [==============================] - 1s - loss: 0.2067 - acc: 0.9417 - val_loss: 0.2045 - val_acc: 0.9433
Epoch 18/20
48000/48000 [==============================] - 1s - loss: 0.2008 - acc: 0.9431 - val_loss: 0.2005 - val_acc: 0.9450
Epoch 19/20
48000/48000 [==============================] - 1s - loss: 0.1948 - acc: 0.9449 - val_loss: 0.1963 - val_acc: 0.9461
Epoch 20/20
48000/48000 [==============================] - 1s - loss: 0.1897 - acc: 0.9466 - val_loss: 0.1911 - val_acc: 0.9485
 9568/10000 [===========================>..] - ETA: 0s
Test score: 0.188220301375
Test accuracy: 0.9449
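The fit call returns a History object whose history dict holds the per-epoch numbers printed above; in this Keras version the keys are 'acc' and 'val_acc' rather than the newer 'accuracy' and 'val_accuracy'. A sketch for plotting the learning curves, assuming matplotlib is available:

# Plot training vs. validation accuracy from the run above (assumes matplotlib)
import matplotlib.pyplot as plt

plt.plot(history.history['acc'], label='train')
plt.plot(history.history['val_acc'], label='validation')
plt.xlabel('epoch')
plt.ylabel('accuracy')
plt.legend()
plt.show()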