Modified National Institute of Standards and Technology (MNIST) Data: Multi-Layer Perceptron (MLP) with Dropout

In [1]:
# Imports for a Keras MLP on MNIST (the cell output shows the CNTK backend).
from __future__ import print_function
import numpy as np
from keras.datasets import mnist
from keras.models import Sequential
# NOTE(review): `keras.layers.core` and `keras.utils.np_utils` are legacy
# Keras import paths; current Keras exposes these as `keras.layers` and
# `keras.utils.to_categorical` — confirm against the pinned Keras version.
from keras.layers.core import Dense, Dropout, Activation
from keras.optimizers import SGD
from keras.utils import np_utils

# matplotlib is imported but `plt` is never used in the cells shown —
# presumably intended for plotting `history` curves later; confirm or remove.
import matplotlib.pyplot as plt
%matplotlib inline

# Seed numpy's legacy global RNG so runs are repeatable. Note this only
# covers numpy-side randomness; the backend keeps its own RNG state.
np.random.seed(1671)  # for reproducibility
Using CNTK backend
In [2]:
# network and training
# Hyper-parameters shared by all later cells (names are the interface —
# the data, model, and fit cells all read these module-level constants).
NB_EPOCH = 250        # full passes over the training set during fit()
BATCH_SIZE = 128      # mini-batch size for SGD updates
VERBOSE = 1           # Keras verbosity: 1 = per-epoch progress bar
NB_CLASSES = 10   # number of outputs = number of digits
OPTIMIZER = SGD() # optimizer, explained later in this chapter
N_HIDDEN = 128        # units in each of the two hidden Dense layers
VALIDATION_SPLIT=0.2 # how much TRAIN is reserved for VALIDATION
DROPOUT = 0.3         # drop probability applied after each hidden layer
In [3]:
# data: shuffled and split between train and test sets
(X_train, y_train), (X_test, y_test) = mnist.load_data()

# Each sample arrives as a 28x28 image; flatten it into a 784-dimensional
# row vector so it can feed the dense (fully-connected) MLP.
RESHAPED = 784
# Use -1 for the sample axis so the count is taken from the data itself
# (60000 train / 10000 test for standard MNIST) instead of hard-coding it.
X_train = X_train.reshape(-1, RESHAPED)
X_test = X_test.reshape(-1, RESHAPED)
# Cast to float32 up front: the backend expects float32 (see the CNTK
# UserWarning in the training log) and it halves memory versus float64.
X_train = X_train.astype('float32')
X_test = X_test.astype('float32')
In [4]:
def _scale_pixels(X):
    """Map pixel intensities from [0, 255] down to [0, 1]."""
    return X / 255

# Normalize both splits and report how many samples each contains.
X_train = _scale_pixels(X_train)
X_test = _scale_pixels(X_test)
for split_name, X in (('train', X_train), ('test', X_test)):
    print(X.shape[0], '%s samples' % split_name)

# One-hot encode the digit labels (0-9) into NB_CLASSES-wide indicator
# rows, matching the softmax/categorical-crossentropy output head.
Y_train = np_utils.to_categorical(y_train, NB_CLASSES)
Y_test = np_utils.to_categorical(y_test, NB_CLASSES)
60000 train samples
10000 test samples
In [5]:
# Two ReLU hidden layers of N_HIDDEN units, each followed by dropout,
# ending in a softmax over the NB_CLASSES digit classes.
model = Sequential([
    Dense(N_HIDDEN, input_shape=(RESHAPED,)),
    Activation('relu'),
    Dropout(DROPOUT),
    Dense(N_HIDDEN),
    Activation('relu'),
    Dropout(DROPOUT),
    Dense(NB_CLASSES),
    Activation('softmax'),
])
model.summary()

# Categorical cross-entropy pairs with the one-hot Y_* targets; OPTIMIZER
# is the plain SGD instance from the config cell.
model.compile(optimizer=OPTIMIZER,
              loss='categorical_crossentropy',
              metrics=['accuracy'])

# Hold out VALIDATION_SPLIT of the training data for per-epoch validation.
history = model.fit(X_train, Y_train,
                    batch_size=BATCH_SIZE, epochs=NB_EPOCH,
                    verbose=VERBOSE, validation_split=VALIDATION_SPLIT)

# Final held-out evaluation: score[0] is the loss, score[1] the accuracy.
score = model.evaluate(X_test, Y_test, verbose=VERBOSE)
print("\nTest score:", score[0])
print('Test accuracy:', score[1])
_________________________________________________________________
Layer (type)                 Output Shape              Param #   
=================================================================
dense_1 (Dense)              (None, 128)               100480    
_________________________________________________________________
activation_1 (Activation)    (None, 128)               0         
_________________________________________________________________
dropout_1 (Dropout)          (None, 128)               0         
_________________________________________________________________
dense_2 (Dense)              (None, 128)               16512     
_________________________________________________________________
activation_2 (Activation)    (None, 128)               0         
_________________________________________________________________
dropout_2 (Dropout)          (None, 128)               0         
_________________________________________________________________
dense_3 (Dense)              (None, 10)                1290      
_________________________________________________________________
activation_3 (Activation)    (None, 10)                0         
=================================================================
Total params: 118,282
Trainable params: 118,282
Non-trainable params: 0
_________________________________________________________________
Train on 48000 samples, validate on 12000 samples
Epoch 1/250
 6144/48000 [==>...........................] - ETA: 1s - loss: 2.2647 - acc: 0.1549
/home/dadebarr/anaconda3/lib/python3.5/site-packages/cntk/core.py:351: UserWarning: your data is of type "float64", but your input variable (uid "Input67") expects "<class 'numpy.float32'>". Please convert your data beforehand to speed up training.
  (sample.dtype, var.uid, str(var.dtype)))
48000/48000 [==============================] - 1s - loss: 1.7118 - acc: 0.4506 - val_loss: 0.9207 - val_acc: 0.8165
Epoch 2/250
48000/48000 [==============================] - 1s - loss: 0.9374 - acc: 0.7105 - val_loss: 0.5349 - val_acc: 0.8700
Epoch 3/250
48000/48000 [==============================] - 0s - loss: 0.7037 - acc: 0.7837 - val_loss: 0.4238 - val_acc: 0.8876
Epoch 4/250
48000/48000 [==============================] - 2s - loss: 0.5976 - acc: 0.8191 - val_loss: 0.3684 - val_acc: 0.8996
Epoch 5/250
48000/48000 [==============================] - 1s - loss: 0.5325 - acc: 0.8419 - val_loss: 0.3368 - val_acc: 0.9058
Epoch 6/250
48000/48000 [==============================] - 1s - loss: 0.4910 - acc: 0.8544 - val_loss: 0.3117 - val_acc: 0.9112
Epoch 7/250
48000/48000 [==============================] - 1s - loss: 0.4580 - acc: 0.8647 - val_loss: 0.2939 - val_acc: 0.9156
Epoch 8/250
48000/48000 [==============================] - 1s - loss: 0.4297 - acc: 0.8727 - val_loss: 0.2795 - val_acc: 0.9194
Epoch 9/250
48000/48000 [==============================] - 1s - loss: 0.4088 - acc: 0.8786 - val_loss: 0.2674 - val_acc: 0.9228
Epoch 10/250
48000/48000 [==============================] - 1s - loss: 0.3912 - acc: 0.8840 - val_loss: 0.2568 - val_acc: 0.9258
Epoch 11/250
48000/48000 [==============================] - 1s - loss: 0.3735 - acc: 0.8911 - val_loss: 0.2469 - val_acc: 0.9282
Epoch 12/250
48000/48000 [==============================] - 1s - loss: 0.3611 - acc: 0.8946 - val_loss: 0.2396 - val_acc: 0.9309
Epoch 13/250
48000/48000 [==============================] - 1s - loss: 0.3511 - acc: 0.8964 - val_loss: 0.2310 - val_acc: 0.9328
Epoch 14/250
48000/48000 [==============================] - 1s - loss: 0.3383 - acc: 0.9019 - val_loss: 0.2234 - val_acc: 0.9353
Epoch 15/250
48000/48000 [==============================] - 1s - loss: 0.3267 - acc: 0.9032 - val_loss: 0.2179 - val_acc: 0.9375
Epoch 16/250
48000/48000 [==============================] - 1s - loss: 0.3189 - acc: 0.9060 - val_loss: 0.2112 - val_acc: 0.9387
Epoch 17/250
48000/48000 [==============================] - 1s - loss: 0.3078 - acc: 0.9101 - val_loss: 0.2066 - val_acc: 0.9408
Epoch 18/250
48000/48000 [==============================] - 1s - loss: 0.3019 - acc: 0.9120 - val_loss: 0.2013 - val_acc: 0.9415
Epoch 19/250
48000/48000 [==============================] - 1s - loss: 0.2945 - acc: 0.9149 - val_loss: 0.1959 - val_acc: 0.9425
Epoch 20/250
48000/48000 [==============================] - 1s - loss: 0.2876 - acc: 0.9165 - val_loss: 0.1917 - val_acc: 0.9444
Epoch 21/250
48000/48000 [==============================] - 0s - loss: 0.2816 - acc: 0.9177 - val_loss: 0.1872 - val_acc: 0.9456
Epoch 22/250
48000/48000 [==============================] - 2s - loss: 0.2737 - acc: 0.9200 - val_loss: 0.1840 - val_acc: 0.9460
Epoch 23/250
48000/48000 [==============================] - 1s - loss: 0.2655 - acc: 0.9226 - val_loss: 0.1794 - val_acc: 0.9483
Epoch 24/250
48000/48000 [==============================] - 1s - loss: 0.2585 - acc: 0.9245 - val_loss: 0.1773 - val_acc: 0.9493
Epoch 25/250
48000/48000 [==============================] - 1s - loss: 0.2580 - acc: 0.9234 - val_loss: 0.1736 - val_acc: 0.9503
Epoch 26/250
48000/48000 [==============================] - 1s - loss: 0.2518 - acc: 0.9266 - val_loss: 0.1699 - val_acc: 0.9511
Epoch 27/250
48000/48000 [==============================] - 1s - loss: 0.2445 - acc: 0.9290 - val_loss: 0.1678 - val_acc: 0.9523
Epoch 28/250
48000/48000 [==============================] - 1s - loss: 0.2432 - acc: 0.9291 - val_loss: 0.1656 - val_acc: 0.9530
Epoch 29/250
48000/48000 [==============================] - 1s - loss: 0.2392 - acc: 0.9302 - val_loss: 0.1622 - val_acc: 0.9543
Epoch 30/250
48000/48000 [==============================] - 1s - loss: 0.2343 - acc: 0.9319 - val_loss: 0.1600 - val_acc: 0.9538
Epoch 31/250
48000/48000 [==============================] - 1s - loss: 0.2321 - acc: 0.9321 - val_loss: 0.1575 - val_acc: 0.9550
Epoch 32/250
48000/48000 [==============================] - 1s - loss: 0.2283 - acc: 0.9320 - val_loss: 0.1548 - val_acc: 0.9559
Epoch 33/250
48000/48000 [==============================] - 1s - loss: 0.2227 - acc: 0.9343 - val_loss: 0.1531 - val_acc: 0.9562
Epoch 34/250
48000/48000 [==============================] - 1s - loss: 0.2203 - acc: 0.9357 - val_loss: 0.1509 - val_acc: 0.9565
Epoch 35/250
48000/48000 [==============================] - 1s - loss: 0.2179 - acc: 0.9363 - val_loss: 0.1493 - val_acc: 0.9571
Epoch 36/250
48000/48000 [==============================] - 1s - loss: 0.2117 - acc: 0.9374 - val_loss: 0.1470 - val_acc: 0.9577
Epoch 37/250
48000/48000 [==============================] - 1s - loss: 0.2110 - acc: 0.9392 - val_loss: 0.1461 - val_acc: 0.9581
Epoch 38/250
48000/48000 [==============================] - 1s - loss: 0.2082 - acc: 0.9378 - val_loss: 0.1437 - val_acc: 0.9584
Epoch 39/250
48000/48000 [==============================] - 1s - loss: 0.2041 - acc: 0.9399 - val_loss: 0.1423 - val_acc: 0.9586
Epoch 40/250
48000/48000 [==============================] - 1s - loss: 0.1977 - acc: 0.9420 - val_loss: 0.1407 - val_acc: 0.9593
Epoch 41/250
48000/48000 [==============================] - 1s - loss: 0.1992 - acc: 0.9418 - val_loss: 0.1389 - val_acc: 0.9597
Epoch 42/250
48000/48000 [==============================] - 1s - loss: 0.1979 - acc: 0.9422 - val_loss: 0.1379 - val_acc: 0.9607
Epoch 43/250
48000/48000 [==============================] - 1s - loss: 0.1920 - acc: 0.9436 - val_loss: 0.1353 - val_acc: 0.9607
Epoch 44/250
48000/48000 [==============================] - 1s - loss: 0.1918 - acc: 0.9438 - val_loss: 0.1347 - val_acc: 0.9610
Epoch 45/250
48000/48000 [==============================] - 1s - loss: 0.1928 - acc: 0.9430 - val_loss: 0.1343 - val_acc: 0.9617
Epoch 46/250
48000/48000 [==============================] - 0s - loss: 0.1860 - acc: 0.9449 - val_loss: 0.1316 - val_acc: 0.9618
Epoch 47/250
48000/48000 [==============================] - 2s - loss: 0.1828 - acc: 0.9451 - val_loss: 0.1305 - val_acc: 0.9623
Epoch 48/250
48000/48000 [==============================] - 1s - loss: 0.1827 - acc: 0.9468 - val_loss: 0.1295 - val_acc: 0.9627
Epoch 49/250
48000/48000 [==============================] - 1s - loss: 0.1791 - acc: 0.9471 - val_loss: 0.1281 - val_acc: 0.9629
Epoch 50/250
48000/48000 [==============================] - 1s - loss: 0.1768 - acc: 0.9482 - val_loss: 0.1268 - val_acc: 0.9639
Epoch 51/250
48000/48000 [==============================] - 1s - loss: 0.1747 - acc: 0.9481 - val_loss: 0.1263 - val_acc: 0.9635
Epoch 52/250
48000/48000 [==============================] - 1s - loss: 0.1766 - acc: 0.9476 - val_loss: 0.1249 - val_acc: 0.9640
Epoch 53/250
48000/48000 [==============================] - 1s - loss: 0.1750 - acc: 0.9491 - val_loss: 0.1238 - val_acc: 0.9647
Epoch 54/250
48000/48000 [==============================] - 1s - loss: 0.1732 - acc: 0.9487 - val_loss: 0.1233 - val_acc: 0.9644
Epoch 55/250
48000/48000 [==============================] - 1s - loss: 0.1736 - acc: 0.9484 - val_loss: 0.1219 - val_acc: 0.9646
Epoch 56/250
48000/48000 [==============================] - 1s - loss: 0.1690 - acc: 0.9506 - val_loss: 0.1205 - val_acc: 0.9651
Epoch 57/250
48000/48000 [==============================] - 1s - loss: 0.1665 - acc: 0.9505 - val_loss: 0.1198 - val_acc: 0.9660
Epoch 58/250
48000/48000 [==============================] - 1s - loss: 0.1654 - acc: 0.9508 - val_loss: 0.1197 - val_acc: 0.9652
Epoch 59/250
48000/48000 [==============================] - 1s - loss: 0.1623 - acc: 0.9523 - val_loss: 0.1179 - val_acc: 0.9665
Epoch 60/250
48000/48000 [==============================] - 1s - loss: 0.1601 - acc: 0.9537 - val_loss: 0.1178 - val_acc: 0.9662
Epoch 61/250
48000/48000 [==============================] - 1s - loss: 0.1619 - acc: 0.9516 - val_loss: 0.1165 - val_acc: 0.9671
Epoch 62/250
48000/48000 [==============================] - 1s - loss: 0.1588 - acc: 0.9531 - val_loss: 0.1156 - val_acc: 0.9670
Epoch 63/250
48000/48000 [==============================] - 1s - loss: 0.1550 - acc: 0.9534 - val_loss: 0.1150 - val_acc: 0.9672
Epoch 64/250
48000/48000 [==============================] - 0s - loss: 0.1543 - acc: 0.9537 - val_loss: 0.1141 - val_acc: 0.9677
Epoch 65/250
48000/48000 [==============================] - 2s - loss: 0.1560 - acc: 0.9534 - val_loss: 0.1138 - val_acc: 0.9675
Epoch 66/250
48000/48000 [==============================] - 1s - loss: 0.1512 - acc: 0.9548 - val_loss: 0.1130 - val_acc: 0.9682
Epoch 67/250
48000/48000 [==============================] - 1s - loss: 0.1504 - acc: 0.9548 - val_loss: 0.1132 - val_acc: 0.9677
Epoch 68/250
48000/48000 [==============================] - 1s - loss: 0.1487 - acc: 0.9554 - val_loss: 0.1112 - val_acc: 0.9681
Epoch 69/250
48000/48000 [==============================] - 1s - loss: 0.1480 - acc: 0.9562 - val_loss: 0.1109 - val_acc: 0.9690
Epoch 70/250
48000/48000 [==============================] - 1s - loss: 0.1467 - acc: 0.9563 - val_loss: 0.1099 - val_acc: 0.9693
Epoch 71/250
48000/48000 [==============================] - 1s - loss: 0.1465 - acc: 0.9555 - val_loss: 0.1095 - val_acc: 0.9688
Epoch 72/250
48000/48000 [==============================] - 1s - loss: 0.1459 - acc: 0.9568 - val_loss: 0.1089 - val_acc: 0.9692
Epoch 73/250
48000/48000 [==============================] - 1s - loss: 0.1460 - acc: 0.9571 - val_loss: 0.1084 - val_acc: 0.9692
Epoch 74/250
48000/48000 [==============================] - 1s - loss: 0.1444 - acc: 0.9578 - val_loss: 0.1076 - val_acc: 0.9695
Epoch 75/250
48000/48000 [==============================] - 1s - loss: 0.1423 - acc: 0.9587 - val_loss: 0.1074 - val_acc: 0.9694
Epoch 76/250
48000/48000 [==============================] - 1s - loss: 0.1399 - acc: 0.9585 - val_loss: 0.1066 - val_acc: 0.9695
Epoch 77/250
48000/48000 [==============================] - 1s - loss: 0.1407 - acc: 0.9579 - val_loss: 0.1064 - val_acc: 0.9702
Epoch 78/250
48000/48000 [==============================] - 1s - loss: 0.1398 - acc: 0.9585 - val_loss: 0.1053 - val_acc: 0.9696
Epoch 79/250
48000/48000 [==============================] - 1s - loss: 0.1374 - acc: 0.9592 - val_loss: 0.1049 - val_acc: 0.9702
Epoch 80/250
48000/48000 [==============================] - 1s - loss: 0.1360 - acc: 0.9596 - val_loss: 0.1046 - val_acc: 0.9703
Epoch 81/250
48000/48000 [==============================] - 1s - loss: 0.1362 - acc: 0.9593 - val_loss: 0.1040 - val_acc: 0.9707
Epoch 82/250
48000/48000 [==============================] - 1s - loss: 0.1356 - acc: 0.9598 - val_loss: 0.1036 - val_acc: 0.9708
Epoch 83/250
48000/48000 [==============================] - 1s - loss: 0.1314 - acc: 0.9610 - val_loss: 0.1030 - val_acc: 0.9702
Epoch 84/250
48000/48000 [==============================] - 1s - loss: 0.1333 - acc: 0.9609 - val_loss: 0.1027 - val_acc: 0.9708
Epoch 85/250
48000/48000 [==============================] - 1s - loss: 0.1298 - acc: 0.9611 - val_loss: 0.1021 - val_acc: 0.9707
Epoch 86/250
48000/48000 [==============================] - 1s - loss: 0.1305 - acc: 0.9611 - val_loss: 0.1011 - val_acc: 0.9716
Epoch 87/250
48000/48000 [==============================] - 1s - loss: 0.1285 - acc: 0.9619 - val_loss: 0.1012 - val_acc: 0.9710
Epoch 88/250
48000/48000 [==============================] - 1s - loss: 0.1280 - acc: 0.9619 - val_loss: 0.1009 - val_acc: 0.9707
Epoch 89/250
48000/48000 [==============================] - 1s - loss: 0.1293 - acc: 0.9613 - val_loss: 0.0998 - val_acc: 0.9710
Epoch 90/250
48000/48000 [==============================] - 1s - loss: 0.1262 - acc: 0.9629 - val_loss: 0.1004 - val_acc: 0.9712
Epoch 91/250
48000/48000 [==============================] - 1s - loss: 0.1298 - acc: 0.9603 - val_loss: 0.0997 - val_acc: 0.9711
Epoch 92/250
48000/48000 [==============================] - 1s - loss: 0.1268 - acc: 0.9624 - val_loss: 0.0987 - val_acc: 0.9713
Epoch 93/250
48000/48000 [==============================] - 0s - loss: 0.1228 - acc: 0.9635 - val_loss: 0.0987 - val_acc: 0.9721
Epoch 94/250
48000/48000 [==============================] - 2s - loss: 0.1235 - acc: 0.9632 - val_loss: 0.0983 - val_acc: 0.9718
Epoch 95/250
48000/48000 [==============================] - 1s - loss: 0.1227 - acc: 0.9636 - val_loss: 0.0978 - val_acc: 0.9716
Epoch 96/250
48000/48000 [==============================] - 1s - loss: 0.1233 - acc: 0.9630 - val_loss: 0.0981 - val_acc: 0.9718
Epoch 97/250
48000/48000 [==============================] - 1s - loss: 0.1220 - acc: 0.9640 - val_loss: 0.0977 - val_acc: 0.9717
Epoch 98/250
48000/48000 [==============================] - 1s - loss: 0.1221 - acc: 0.9632 - val_loss: 0.0964 - val_acc: 0.9728
Epoch 99/250
48000/48000 [==============================] - 1s - loss: 0.1215 - acc: 0.9641 - val_loss: 0.0964 - val_acc: 0.9728
Epoch 100/250
48000/48000 [==============================] - 1s - loss: 0.1166 - acc: 0.9651 - val_loss: 0.0959 - val_acc: 0.9727
Epoch 101/250
48000/48000 [==============================] - 1s - loss: 0.1169 - acc: 0.9651 - val_loss: 0.0961 - val_acc: 0.9731
Epoch 102/250
48000/48000 [==============================] - 1s - loss: 0.1171 - acc: 0.9650 - val_loss: 0.0955 - val_acc: 0.9730
Epoch 103/250
48000/48000 [==============================] - 1s - loss: 0.1158 - acc: 0.9647 - val_loss: 0.0948 - val_acc: 0.9727
Epoch 104/250
48000/48000 [==============================] - 1s - loss: 0.1160 - acc: 0.9658 - val_loss: 0.0949 - val_acc: 0.9733
Epoch 105/250
48000/48000 [==============================] - 1s - loss: 0.1166 - acc: 0.9651 - val_loss: 0.0940 - val_acc: 0.9728
Epoch 106/250
48000/48000 [==============================] - 1s - loss: 0.1123 - acc: 0.9661 - val_loss: 0.0936 - val_acc: 0.9730
Epoch 107/250
48000/48000 [==============================] - 1s - loss: 0.1143 - acc: 0.9654 - val_loss: 0.0941 - val_acc: 0.9728
Epoch 108/250
48000/48000 [==============================] - 1s - loss: 0.1127 - acc: 0.9663 - val_loss: 0.0939 - val_acc: 0.9719
Epoch 109/250
48000/48000 [==============================] - 1s - loss: 0.1130 - acc: 0.9668 - val_loss: 0.0934 - val_acc: 0.9733
Epoch 110/250
48000/48000 [==============================] - 1s - loss: 0.1125 - acc: 0.9662 - val_loss: 0.0928 - val_acc: 0.9734
Epoch 111/250
48000/48000 [==============================] - 1s - loss: 0.1105 - acc: 0.9663 - val_loss: 0.0927 - val_acc: 0.9734
Epoch 112/250
48000/48000 [==============================] - 1s - loss: 0.1111 - acc: 0.9669 - val_loss: 0.0927 - val_acc: 0.9738
Epoch 113/250
48000/48000 [==============================] - 1s - loss: 0.1094 - acc: 0.9673 - val_loss: 0.0923 - val_acc: 0.9744
Epoch 114/250
48000/48000 [==============================] - 1s - loss: 0.1106 - acc: 0.9659 - val_loss: 0.0913 - val_acc: 0.9738
Epoch 115/250
48000/48000 [==============================] - 1s - loss: 0.1080 - acc: 0.9672 - val_loss: 0.0918 - val_acc: 0.9743
Epoch 116/250
48000/48000 [==============================] - 1s - loss: 0.1075 - acc: 0.9673 - val_loss: 0.0911 - val_acc: 0.9738
Epoch 117/250
48000/48000 [==============================] - 1s - loss: 0.1068 - acc: 0.9677 - val_loss: 0.0909 - val_acc: 0.9743
Epoch 118/250
48000/48000 [==============================] - 1s - loss: 0.1087 - acc: 0.9666 - val_loss: 0.0905 - val_acc: 0.9747
Epoch 119/250
48000/48000 [==============================] - 1s - loss: 0.1072 - acc: 0.9685 - val_loss: 0.0906 - val_acc: 0.9743
Epoch 120/250
48000/48000 [==============================] - 1s - loss: 0.1089 - acc: 0.9675 - val_loss: 0.0904 - val_acc: 0.9744
Epoch 121/250
48000/48000 [==============================] - 1s - loss: 0.1031 - acc: 0.9685 - val_loss: 0.0900 - val_acc: 0.9736
Epoch 122/250
48000/48000 [==============================] - 1s - loss: 0.1062 - acc: 0.9679 - val_loss: 0.0903 - val_acc: 0.9743
Epoch 123/250
48000/48000 [==============================] - 1s - loss: 0.1041 - acc: 0.9688 - val_loss: 0.0898 - val_acc: 0.9745
Epoch 124/250
48000/48000 [==============================] - 1s - loss: 0.1037 - acc: 0.9698 - val_loss: 0.0898 - val_acc: 0.9750
Epoch 125/250
48000/48000 [==============================] - 1s - loss: 0.1029 - acc: 0.9688 - val_loss: 0.0896 - val_acc: 0.9747
Epoch 126/250
48000/48000 [==============================] - 1s - loss: 0.1042 - acc: 0.9682 - val_loss: 0.0891 - val_acc: 0.9751
Epoch 127/250
48000/48000 [==============================] - 1s - loss: 0.0999 - acc: 0.9705 - val_loss: 0.0890 - val_acc: 0.9748
Epoch 128/250
48000/48000 [==============================] - 1s - loss: 0.1012 - acc: 0.9697 - val_loss: 0.0885 - val_acc: 0.9748
Epoch 129/250
48000/48000 [==============================] - 0s - loss: 0.0994 - acc: 0.9693 - val_loss: 0.0888 - val_acc: 0.9744
Epoch 130/250
48000/48000 [==============================] - 2s - loss: 0.1020 - acc: 0.9695 - val_loss: 0.0887 - val_acc: 0.9748
Epoch 131/250
48000/48000 [==============================] - 1s - loss: 0.0993 - acc: 0.9699 - val_loss: 0.0886 - val_acc: 0.9745
Epoch 132/250
48000/48000 [==============================] - 1s - loss: 0.0973 - acc: 0.9707 - val_loss: 0.0878 - val_acc: 0.9747
Epoch 133/250
48000/48000 [==============================] - 1s - loss: 0.0966 - acc: 0.9709 - val_loss: 0.0880 - val_acc: 0.9748
Epoch 134/250
48000/48000 [==============================] - 1s - loss: 0.0982 - acc: 0.9700 - val_loss: 0.0877 - val_acc: 0.9754
Epoch 135/250
48000/48000 [==============================] - 1s - loss: 0.0962 - acc: 0.9713 - val_loss: 0.0872 - val_acc: 0.9752
Epoch 136/250
48000/48000 [==============================] - 1s - loss: 0.0992 - acc: 0.9698 - val_loss: 0.0874 - val_acc: 0.9752
Epoch 137/250
48000/48000 [==============================] - 1s - loss: 0.0973 - acc: 0.9705 - val_loss: 0.0872 - val_acc: 0.9747
Epoch 138/250
48000/48000 [==============================] - 1s - loss: 0.0943 - acc: 0.9715 - val_loss: 0.0869 - val_acc: 0.9752
Epoch 139/250
48000/48000 [==============================] - 1s - loss: 0.0956 - acc: 0.9716 - val_loss: 0.0870 - val_acc: 0.9751
Epoch 140/250
48000/48000 [==============================] - 1s - loss: 0.0966 - acc: 0.9703 - val_loss: 0.0872 - val_acc: 0.9750
Epoch 141/250
48000/48000 [==============================] - 1s - loss: 0.0968 - acc: 0.9707 - val_loss: 0.0865 - val_acc: 0.9755
Epoch 142/250
48000/48000 [==============================] - 1s - loss: 0.0952 - acc: 0.9719 - val_loss: 0.0866 - val_acc: 0.9754
Epoch 143/250
48000/48000 [==============================] - 1s - loss: 0.0943 - acc: 0.9718 - val_loss: 0.0868 - val_acc: 0.9756
Epoch 144/250
48000/48000 [==============================] - 1s - loss: 0.0911 - acc: 0.9720 - val_loss: 0.0865 - val_acc: 0.9753
Epoch 145/250
48000/48000 [==============================] - 1s - loss: 0.0957 - acc: 0.9699 - val_loss: 0.0860 - val_acc: 0.9756
Epoch 146/250
48000/48000 [==============================] - 1s - loss: 0.0956 - acc: 0.9705 - val_loss: 0.0856 - val_acc: 0.9756
Epoch 147/250
48000/48000 [==============================] - 0s - loss: 0.0908 - acc: 0.9720 - val_loss: 0.0859 - val_acc: 0.9756
Epoch 148/250
48000/48000 [==============================] - 2s - loss: 0.0901 - acc: 0.9720 - val_loss: 0.0862 - val_acc: 0.9758
Epoch 149/250
48000/48000 [==============================] - 1s - loss: 0.0929 - acc: 0.9716 - val_loss: 0.0853 - val_acc: 0.9757
Epoch 150/250
48000/48000 [==============================] - 1s - loss: 0.0926 - acc: 0.9720 - val_loss: 0.0856 - val_acc: 0.9758
Epoch 151/250
48000/48000 [==============================] - 1s - loss: 0.0894 - acc: 0.9736 - val_loss: 0.0853 - val_acc: 0.9758
Epoch 152/250
48000/48000 [==============================] - 1s - loss: 0.0891 - acc: 0.9719 - val_loss: 0.0855 - val_acc: 0.9758
Epoch 153/250
48000/48000 [==============================] - 1s - loss: 0.0901 - acc: 0.9722 - val_loss: 0.0854 - val_acc: 0.9755
Epoch 154/250
48000/48000 [==============================] - 1s - loss: 0.0924 - acc: 0.9719 - val_loss: 0.0854 - val_acc: 0.9763
Epoch 155/250
48000/48000 [==============================] - 1s - loss: 0.0886 - acc: 0.9731 - val_loss: 0.0847 - val_acc: 0.9760
Epoch 156/250
48000/48000 [==============================] - 1s - loss: 0.0895 - acc: 0.9722 - val_loss: 0.0846 - val_acc: 0.9759
Epoch 157/250
48000/48000 [==============================] - 1s - loss: 0.0884 - acc: 0.9722 - val_loss: 0.0842 - val_acc: 0.9764
Epoch 158/250
48000/48000 [==============================] - 1s - loss: 0.0902 - acc: 0.9713 - val_loss: 0.0842 - val_acc: 0.9766
Epoch 159/250
48000/48000 [==============================] - 1s - loss: 0.0897 - acc: 0.9723 - val_loss: 0.0844 - val_acc: 0.9763
Epoch 160/250
48000/48000 [==============================] - 1s - loss: 0.0892 - acc: 0.9735 - val_loss: 0.0844 - val_acc: 0.9758
Epoch 161/250
48000/48000 [==============================] - 1s - loss: 0.0870 - acc: 0.9728 - val_loss: 0.0838 - val_acc: 0.9754
Epoch 162/250
48000/48000 [==============================] - 1s - loss: 0.0865 - acc: 0.9733 - val_loss: 0.0842 - val_acc: 0.9762
Epoch 163/250
48000/48000 [==============================] - 1s - loss: 0.0856 - acc: 0.9739 - val_loss: 0.0840 - val_acc: 0.9762
Epoch 164/250
48000/48000 [==============================] - 1s - loss: 0.0868 - acc: 0.9730 - val_loss: 0.0840 - val_acc: 0.9762
Epoch 165/250
48000/48000 [==============================] - 1s - loss: 0.0838 - acc: 0.9741 - val_loss: 0.0836 - val_acc: 0.9760
Epoch 166/250
48000/48000 [==============================] - 1s - loss: 0.0857 - acc: 0.9739 - val_loss: 0.0839 - val_acc: 0.9757
Epoch 167/250
48000/48000 [==============================] - 1s - loss: 0.0858 - acc: 0.9740 - val_loss: 0.0833 - val_acc: 0.9762
Epoch 168/250
48000/48000 [==============================] - 1s - loss: 0.0864 - acc: 0.9738 - val_loss: 0.0831 - val_acc: 0.9764
Epoch 169/250
48000/48000 [==============================] - 1s - loss: 0.0824 - acc: 0.9748 - val_loss: 0.0830 - val_acc: 0.9758
Epoch 170/250
48000/48000 [==============================] - 1s - loss: 0.0838 - acc: 0.9741 - val_loss: 0.0833 - val_acc: 0.9757
Epoch 171/250
48000/48000 [==============================] - 1s - loss: 0.0846 - acc: 0.9742 - val_loss: 0.0830 - val_acc: 0.9760
Epoch 172/250
48000/48000 [==============================] - 1s - loss: 0.0844 - acc: 0.9751 - val_loss: 0.0827 - val_acc: 0.9759
Epoch 173/250
48000/48000 [==============================] - 1s - loss: 0.0844 - acc: 0.9747 - val_loss: 0.0826 - val_acc: 0.9762
Epoch 174/250
48000/48000 [==============================] - 1s - loss: 0.0816 - acc: 0.9749 - val_loss: 0.0826 - val_acc: 0.9762
Epoch 175/250
48000/48000 [==============================] - 1s - loss: 0.0831 - acc: 0.9739 - val_loss: 0.0825 - val_acc: 0.9758
Epoch 176/250
48000/48000 [==============================] - 1s - loss: 0.0806 - acc: 0.9750 - val_loss: 0.0821 - val_acc: 0.9768
Epoch 177/250
48000/48000 [==============================] - 1s - loss: 0.0823 - acc: 0.9751 - val_loss: 0.0824 - val_acc: 0.9762
Epoch 178/250
48000/48000 [==============================] - 1s - loss: 0.0802 - acc: 0.9754 - val_loss: 0.0820 - val_acc: 0.9766
Epoch 179/250
48000/48000 [==============================] - 1s - loss: 0.0802 - acc: 0.9756 - val_loss: 0.0821 - val_acc: 0.9758
Epoch 180/250
48000/48000 [==============================] - 1s - loss: 0.0800 - acc: 0.9755 - val_loss: 0.0817 - val_acc: 0.9766
Epoch 181/250
48000/48000 [==============================] - 1s - loss: 0.0808 - acc: 0.9750 - val_loss: 0.0821 - val_acc: 0.9767
Epoch 182/250
48000/48000 [==============================] - 1s - loss: 0.0796 - acc: 0.9759 - val_loss: 0.0818 - val_acc: 0.9768
Epoch 183/250
48000/48000 [==============================] - 1s - loss: 0.0790 - acc: 0.9764 - val_loss: 0.0816 - val_acc: 0.9763
Epoch 184/250
48000/48000 [==============================] - 1s - loss: 0.0806 - acc: 0.9750 - val_loss: 0.0813 - val_acc: 0.9766
Epoch 185/250
48000/48000 [==============================] - 1s - loss: 0.0798 - acc: 0.9758 - val_loss: 0.0820 - val_acc: 0.9763
Epoch 186/250
48000/48000 [==============================] - 1s - loss: 0.0773 - acc: 0.9763 - val_loss: 0.0819 - val_acc: 0.9761
Epoch 187/250
48000/48000 [==============================] - 1s - loss: 0.0809 - acc: 0.9754 - val_loss: 0.0814 - val_acc: 0.9762
Epoch 188/250
48000/48000 [==============================] - 1s - loss: 0.0795 - acc: 0.9749 - val_loss: 0.0812 - val_acc: 0.9765
Epoch 189/250
48000/48000 [==============================] - 1s - loss: 0.0797 - acc: 0.9754 - val_loss: 0.0814 - val_acc: 0.9762
Epoch 190/250
48000/48000 [==============================] - 1s - loss: 0.0777 - acc: 0.9761 - val_loss: 0.0809 - val_acc: 0.9764
Epoch 191/250
48000/48000 [==============================] - 1s - loss: 0.0777 - acc: 0.9759 - val_loss: 0.0814 - val_acc: 0.9768
Epoch 192/250
48000/48000 [==============================] - 1s - loss: 0.0768 - acc: 0.9764 - val_loss: 0.0818 - val_acc: 0.9768
Epoch 193/250
48000/48000 [==============================] - 1s - loss: 0.0753 - acc: 0.9765 - val_loss: 0.0810 - val_acc: 0.9772
Epoch 194/250
48000/48000 [==============================] - 0s - loss: 0.0765 - acc: 0.9768 - val_loss: 0.0812 - val_acc: 0.9768
Epoch 195/250
48000/48000 [==============================] - 2s - loss: 0.0752 - acc: 0.9767 - val_loss: 0.0808 - val_acc: 0.9768
Epoch 196/250
48000/48000 [==============================] - 1s - loss: 0.0731 - acc: 0.9775 - val_loss: 0.0807 - val_acc: 0.9766
Epoch 197/250
48000/48000 [==============================] - 1s - loss: 0.0763 - acc: 0.9761 - val_loss: 0.0809 - val_acc: 0.9768
Epoch 198/250
48000/48000 [==============================] - 1s - loss: 0.0739 - acc: 0.9767 - val_loss: 0.0814 - val_acc: 0.9764
Epoch 199/250
48000/48000 [==============================] - 1s - loss: 0.0732 - acc: 0.9772 - val_loss: 0.0807 - val_acc: 0.9772
Epoch 200/250
48000/48000 [==============================] - 1s - loss: 0.0756 - acc: 0.9762 - val_loss: 0.0800 - val_acc: 0.9774
Epoch 201/250
48000/48000 [==============================] - 1s - loss: 0.0755 - acc: 0.9760 - val_loss: 0.0799 - val_acc: 0.9768
Epoch 202/250
48000/48000 [==============================] - 1s - loss: 0.0727 - acc: 0.9775 - val_loss: 0.0805 - val_acc: 0.9768
Epoch 203/250
48000/48000 [==============================] - 1s - loss: 0.0742 - acc: 0.9767 - val_loss: 0.0800 - val_acc: 0.9770
Epoch 204/250
48000/48000 [==============================] - 1s - loss: 0.0745 - acc: 0.9770 - val_loss: 0.0801 - val_acc: 0.9770
Epoch 205/250
48000/48000 [==============================] - 1s - loss: 0.0705 - acc: 0.9789 - val_loss: 0.0796 - val_acc: 0.9771
Epoch 206/250
48000/48000 [==============================] - 1s - loss: 0.0734 - acc: 0.9771 - val_loss: 0.0795 - val_acc: 0.9772
Epoch 207/250
48000/48000 [==============================] - 1s - loss: 0.0728 - acc: 0.9781 - val_loss: 0.0798 - val_acc: 0.9772
Epoch 208/250
48000/48000 [==============================] - 1s - loss: 0.0721 - acc: 0.9782 - val_loss: 0.0805 - val_acc: 0.9768
Epoch 209/250
48000/48000 [==============================] - 1s - loss: 0.0733 - acc: 0.9773 - val_loss: 0.0797 - val_acc: 0.9775
Epoch 210/250
48000/48000 [==============================] - 1s - loss: 0.0720 - acc: 0.9779 - val_loss: 0.0798 - val_acc: 0.9774
Epoch 211/250
48000/48000 [==============================] - 1s - loss: 0.0747 - acc: 0.9770 - val_loss: 0.0797 - val_acc: 0.9771
Epoch 212/250
48000/48000 [==============================] - 1s - loss: 0.0709 - acc: 0.9776 - val_loss: 0.0799 - val_acc: 0.9775
Epoch 213/250
48000/48000 [==============================] - 1s - loss: 0.0730 - acc: 0.9774 - val_loss: 0.0796 - val_acc: 0.9776
Epoch 214/250
48000/48000 [==============================] - 1s - loss: 0.0704 - acc: 0.9792 - val_loss: 0.0796 - val_acc: 0.9769
Epoch 215/250
48000/48000 [==============================] - 1s - loss: 0.0718 - acc: 0.9773 - val_loss: 0.0804 - val_acc: 0.9768
Epoch 216/250
48000/48000 [==============================] - 1s - loss: 0.0703 - acc: 0.9779 - val_loss: 0.0792 - val_acc: 0.9767
Epoch 217/250
48000/48000 [==============================] - 1s - loss: 0.0697 - acc: 0.9780 - val_loss: 0.0797 - val_acc: 0.9765
Epoch 218/250
48000/48000 [==============================] - 1s - loss: 0.0709 - acc: 0.9782 - val_loss: 0.0791 - val_acc: 0.9771
Epoch 219/250
48000/48000 [==============================] - 1s - loss: 0.0691 - acc: 0.9787 - val_loss: 0.0799 - val_acc: 0.9768
Epoch 220/250
48000/48000 [==============================] - 1s - loss: 0.0679 - acc: 0.9794 - val_loss: 0.0794 - val_acc: 0.9770
Epoch 221/250
48000/48000 [==============================] - 1s - loss: 0.0691 - acc: 0.9780 - val_loss: 0.0796 - val_acc: 0.9769
Epoch 222/250
48000/48000 [==============================] - 1s - loss: 0.0680 - acc: 0.9790 - val_loss: 0.0789 - val_acc: 0.9774
Epoch 223/250
48000/48000 [==============================] - 1s - loss: 0.0676 - acc: 0.9785 - val_loss: 0.0790 - val_acc: 0.9776
Epoch 224/250
48000/48000 [==============================] - 1s - loss: 0.0675 - acc: 0.9795 - val_loss: 0.0795 - val_acc: 0.9770
Epoch 225/250
48000/48000 [==============================] - 1s - loss: 0.0671 - acc: 0.9792 - val_loss: 0.0793 - val_acc: 0.9774
Epoch 226/250
48000/48000 [==============================] - 1s - loss: 0.0691 - acc: 0.9774 - val_loss: 0.0789 - val_acc: 0.9771
Epoch 227/250
48000/48000 [==============================] - 1s - loss: 0.0671 - acc: 0.9796 - val_loss: 0.0792 - val_acc: 0.9776
Epoch 228/250
48000/48000 [==============================] - 1s - loss: 0.0660 - acc: 0.9784 - val_loss: 0.0787 - val_acc: 0.9772
Epoch 229/250
48000/48000 [==============================] - 1s - loss: 0.0676 - acc: 0.9792 - val_loss: 0.0794 - val_acc: 0.9776
Epoch 230/250
48000/48000 [==============================] - 1s - loss: 0.0677 - acc: 0.9789 - val_loss: 0.0791 - val_acc: 0.9774
Epoch 231/250
48000/48000 [==============================] - 1s - loss: 0.0662 - acc: 0.9792 - val_loss: 0.0789 - val_acc: 0.9773
Epoch 232/250
48000/48000 [==============================] - 1s - loss: 0.0676 - acc: 0.9791 - val_loss: 0.0795 - val_acc: 0.9772
Epoch 233/250
48000/48000 [==============================] - 1s - loss: 0.0682 - acc: 0.9789 - val_loss: 0.0792 - val_acc: 0.9773
Epoch 234/250
48000/48000 [==============================] - 0s - loss: 0.0659 - acc: 0.9796 - val_loss: 0.0793 - val_acc: 0.9769
Epoch 235/250
48000/48000 [==============================] - 2s - loss: 0.0667 - acc: 0.9794 - val_loss: 0.0786 - val_acc: 0.9775
Epoch 236/250
48000/48000 [==============================] - 1s - loss: 0.0665 - acc: 0.9789 - val_loss: 0.0788 - val_acc: 0.9778
Epoch 237/250
48000/48000 [==============================] - 1s - loss: 0.0669 - acc: 0.9801 - val_loss: 0.0787 - val_acc: 0.9775
Epoch 238/250
48000/48000 [==============================] - 1s - loss: 0.0658 - acc: 0.9797 - val_loss: 0.0784 - val_acc: 0.9777
Epoch 239/250
48000/48000 [==============================] - 1s - loss: 0.0645 - acc: 0.9799 - val_loss: 0.0784 - val_acc: 0.9772
Epoch 240/250
48000/48000 [==============================] - 1s - loss: 0.0648 - acc: 0.9796 - val_loss: 0.0787 - val_acc: 0.9773
Epoch 241/250
48000/48000 [==============================] - 1s - loss: 0.0649 - acc: 0.9790 - val_loss: 0.0784 - val_acc: 0.9777
Epoch 242/250
48000/48000 [==============================] - 1s - loss: 0.0645 - acc: 0.9803 - val_loss: 0.0786 - val_acc: 0.9774
Epoch 243/250
48000/48000 [==============================] - 1s - loss: 0.0637 - acc: 0.9805 - val_loss: 0.0786 - val_acc: 0.9775
Epoch 244/250
48000/48000 [==============================] - 1s - loss: 0.0626 - acc: 0.9806 - val_loss: 0.0784 - val_acc: 0.9775
Epoch 245/250
48000/48000 [==============================] - 0s - loss: 0.0646 - acc: 0.9792 - val_loss: 0.0780 - val_acc: 0.9778
Epoch 246/250
48000/48000 [==============================] - 1s - loss: 0.0646 - acc: 0.9800 - val_loss: 0.0775 - val_acc: 0.9776
Epoch 247/250
48000/48000 [==============================] - 1s - loss: 0.0630 - acc: 0.9801 - val_loss: 0.0779 - val_acc: 0.9778
Epoch 248/250
48000/48000 [==============================] - 1s - loss: 0.0619 - acc: 0.9806 - val_loss: 0.0780 - val_acc: 0.9782
Epoch 249/250
48000/48000 [==============================] - 2s - loss: 0.0628 - acc: 0.9806 - val_loss: 0.0784 - val_acc: 0.9780
Epoch 250/250
48000/48000 [==============================] - 1s - loss: 0.0635 - acc: 0.9801 - val_loss: 0.0779 - val_acc: 0.9781
 9376/10000 [===========================>..] - ETA: 0s
Test score: 0.0753441467251
Test accuracy: 0.9778
In [6]:
# Inspect which metrics Keras recorded during fit().
print(history.history.keys())

# Summarize history for accuracy.
# NOTE: 'val_acc' comes from the VALIDATION_SPLIT (20% of TRAIN), not the
# held-out test set — so the curve is labeled 'validation', not 'test'.
plt.plot(history.history['acc'])
plt.plot(history.history['val_acc'])
plt.title('model accuracy')
plt.ylabel('accuracy')
plt.xlabel('epoch')
plt.legend(['train', 'validation'], loc='upper left')
plt.show()
dict_keys(['val_acc', 'val_loss', 'loss', 'acc'])
In [7]:
# Summarize history for loss.
# NOTE: 'val_loss' is computed on the VALIDATION_SPLIT (20% of TRAIN),
# not the test set — so the curve is labeled 'validation', not 'test'.
plt.plot(history.history['loss'])
plt.plot(history.history['val_loss'])
plt.title('model loss')
plt.ylabel('loss')
plt.xlabel('epoch')
plt.legend(['train', 'validation'], loc='upper left')
plt.show()
In [ ]: