from __future__ import print_function
import numpy as np
from keras.datasets import mnist
from keras.models import Sequential
from keras.layers.core import Dense, Activation
from keras.optimizers import SGD
from keras.utils import np_utils
np.random.seed(1671) # for reproducibility
# (output) Using CNTK backend
# network and training
NB_EPOCH = 200
BATCH_SIZE = 128
VERBOSE = 1
NB_CLASSES = 10 # number of outputs = number of digits
OPTIMIZER = SGD() # SGD optimizer, explained later in this chapter
N_HIDDEN = 128
VALIDATION_SPLIT = 0.2 # how much TRAIN is reserved for VALIDATION
# data: shuffled and split between train and test sets
#
(X_train, y_train), (X_test, y_test) = mnist.load_data()
# X_train is 60000 rows of 28x28 values --> reshaped into 60000 x 784
RESHAPED = 784
#
X_train = X_train.reshape(60000, RESHAPED)
X_test = X_test.reshape(10000, RESHAPED)
X_train = X_train.astype('float32')
X_test = X_test.astype('float32')
# normalize
#
X_train /= 255
X_test /= 255
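# Quick sanity check (an added sketch, not in the original listing): after
# dividing by 255 the pixel intensities should all lie in [0, 1].
assert X_train.min() >= 0.0 and X_train.max() <= 1.0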
print(X_train.shape[0], 'train samples')
print(X_test.shape[0], 'test samples')
# (output) 60000 train samples
# (output) 10000 test samples
# convert class vectors to binary class matrices
Y_train = np_utils.to_categorical(y_train, NB_CLASSES)
Y_test = np_utils.to_categorical(y_test, NB_CLASSES)
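# Illustrative sketch (added): to_categorical one-hot encodes each integer
# label; for instance the digit 3 becomes a row with a single 1 in column 3.
print(np_utils.to_categorical([3], NB_CLASSES))
# (output) [[ 0.  0.  0.  1.  0.  0.  0.  0.  0.  0.]]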
# 10 outputs
# final stage is softmax
model = Sequential()
model.add(Dense(NB_CLASSES, input_shape=(RESHAPED,)))
model.add(Activation('softmax'))
model.summary()
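# Where the 7,850 parameters reported by summary() come from: the Dense
# layer owns a 784x10 weight matrix plus 10 biases, and 784*10 + 10 = 7850.
# As an added sketch (not part of the original listing), the layer maps an
# input x to softmax(xW + b); a hand-rolled NumPy softmax for one vector:
def softmax(z):
    e = np.exp(z - z.max())   # subtract the max for numerical stability
    return e / e.sum()        # probabilities that sum to one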
model.compile(loss='categorical_crossentropy',
              optimizer=OPTIMIZER,
              metrics=['accuracy'])
history = model.fit(X_train, Y_train,
                    batch_size=BATCH_SIZE, epochs=NB_EPOCH,
                    verbose=VERBOSE, validation_split=VALIDATION_SPLIT)
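# Optional sketch (added): history.history records the per-epoch metrics
# logged below ('loss', 'acc', 'val_loss', 'val_acc'); with matplotlib
# installed, the learning curves can be plotted like this:
# import matplotlib.pyplot as plt
# plt.plot(history.history['acc'], label='train acc')
# plt.plot(history.history['val_acc'], label='val acc')
# plt.legend()
# plt.show()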
score = model.evaluate(X_test, Y_test, verbose=VERBOSE)
print("\nTest score:", score[0])
print('Test accuracy:', score[1])
_________________________________________________________________
Layer (type)                 Output Shape              Param #
=================================================================
dense_1 (Dense)              (None, 10)                7850
_________________________________________________________________
activation_1 (Activation)    (None, 10)                0
=================================================================
Total params: 7,850
Trainable params: 7,850
Non-trainable params: 0
_________________________________________________________________
Train on 48000 samples, validate on 12000 samples
Epoch 1/200
/home/dadebarr/anaconda3/lib/python3.5/site-packages/cntk/core.py:351: UserWarning:
  your data is of type "float64", but your input variable (uid "Input27") expects
  "<class 'numpy.float32'>". Please convert your data beforehand to speed up training.
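# Note on the warning above (an added aside, not confirmed by the original
# text): the image arrays were already cast to float32, so the float64 data
# is most likely the one-hot label matrices; np_utils.to_categorical in this
# Keras version appears to build float64 arrays. Casting them as well, e.g.
#     Y_train = Y_train.astype('float32')
#     Y_test = Y_test.astype('float32')
# should silence the warning and speed up training slightly.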
48000/48000 [==============================] - 1s - loss: 1.4009 - acc: 0.6553 - val_loss: 0.8981 - val_acc: 0.8226
Epoch 2/200
48000/48000 [==============================] - 1s - loss: 0.7959 - acc: 0.8244 - val_loss: 0.6581 - val_acc: 0.8561
Epoch 3/200
48000/48000 [==============================] - 1s - loss: 0.6451 - acc: 0.8480 - val_loss: 0.5622 - val_acc: 0.8695
... (epochs 4 through 198 elided) ...
Epoch 199/200
48000/48000 [==============================] - 0s - loss: 0.2762 - acc: 0.9229 - val_loss: 0.2754 - val_acc: 0.9234
Epoch 200/200
48000/48000 [==============================] - 1s - loss: 0.2760 - acc: 0.9230 - val_loss: 0.2754 - val_acc: 0.9238

Test score: 0.277522749448
Test accuracy: 0.9225
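# Follow-up sketch (added): to inspect individual predictions after
# training, model.predict returns the ten softmax probabilities per image;
# np.argmax recovers the most likely digit for each row.
predictions = model.predict(X_test)
predicted_classes = np.argmax(predictions, axis=1)
print(predicted_classes[:10], y_test[:10])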