In [0]:
# BASIC WALK THROUGH FOR MNIST NN
# BASED ON KERAS TUTORIALS (2019)

from keras.datasets import mnist
from keras.models import Sequential
from keras.layers import Dense
from keras.layers import Dropout
from keras.utils import np_utils
import matplotlib.pyplot as plt
Using TensorFlow backend.

The default version of TensorFlow in Colab will soon switch to TensorFlow 2.x.
We recommend you upgrade now or ensure your notebook will continue to use TensorFlow 1.x via the %tensorflow_version 1.x magic: more info.

OBTAIN

In [0]:
# Load the MNIST dataset: 60k training and 10k test 28x28 grayscale digit images.
# Fix: removed the duplicate `from keras.datasets import mnist` — the module is
# already imported in the notebook's top import cell.
(X_train, y_train), (X_test, y_test) = mnist.load_data()

SCRUB

In [0]:
# Flatten each 28x28 image into a single 784-element row vector so the dense
# network can consume it; cast to float32 ahead of the normalization step.
num_pixels = X_train.shape[1] * X_train.shape[2]
X_train = X_train.astype('float32').reshape(len(X_train), num_pixels)
X_test = X_test.astype('float32').reshape(len(X_test), num_pixels)
In [0]:
# NORMALIZE INPUTS FROM 0-255 GRAYSCALE PIXEL INTENSITIES TO THE 0-1 RANGE
# (MNIST images are single-channel grayscale, not RGB)
X_train = X_train / 255
X_test = X_test / 255

# ONE-HOT ENCODE THE LABELS: CONVERT INTEGER CLASS IDS (0-9) INTO BINARY
# INDICATOR VECTORS, AS REQUIRED BY THE categorical_crossentropy LOSS
y_train = np_utils.to_categorical(y_train)
y_test = np_utils.to_categorical(y_test)
# Number of classes = width of the one-hot vectors (10 for MNIST digits)
num_classes = y_test.shape[1]

MODEL

In [0]:
# BUILD BASELINE
def baseline_model(input_dim=None, n_classes=None):
    """Build and compile the baseline MLP for MNIST.

    One fully-connected hidden layer the size of the input (ReLU), followed
    by a softmax output layer; compiled with Adam and categorical
    cross-entropy, tracking accuracy.

    Args:
        input_dim: Number of input features. Defaults to the notebook-level
            ``num_pixels`` (28*28 = 784) when None, preserving the original
            zero-argument call ``baseline_model()``.
        n_classes: Number of output classes. Defaults to the notebook-level
            ``num_classes`` (10) when None.

    Returns:
        A compiled Keras ``Sequential`` model.
    """
    # Fall back to the notebook globals so existing callers are unchanged.
    if input_dim is None:
        input_dim = num_pixels
    if n_classes is None:
        n_classes = num_classes
    # create model
    model = Sequential()
    model.add(Dense(input_dim, input_dim=input_dim, kernel_initializer='normal', activation='relu'))
    model.add(Dense(n_classes, kernel_initializer='normal', activation='softmax'))
    # Compile model
    model.compile(loss='categorical_crossentropy', optimizer='adam', metrics=['accuracy'])
    return model
In [0]:
# RUN BASELINE
model = baseline_model()

# FIT THE MODEL
# NOTE(review): 300 epochs badly overfits on this setup — the logged output
# shows val_loss bottoming out near epoch ~20 (~0.057) and climbing to ~0.107
# by epoch 300 while training loss collapses to ~1e-7. Consider far fewer
# epochs or an EarlyStopping callback; training accuracy of 1.0 here is not
# a sign of a better model.
history  = model.fit(X_train, y_train, validation_data=(X_test, y_test), epochs=300, batch_size=1000, verbose=2)

# EVALUATE THE MODEL
# scores[0] is the test loss, scores[1] the test accuracy (per compile metrics)
scores = model.evaluate(X_test, y_test, verbose=0)
# Report classification error as a percentage (100% - accuracy%)
print("Baseline Error: %.2f%%" % (100-scores[1]*100))
WARNING:tensorflow:From /usr/local/lib/python3.6/dist-packages/keras/backend/tensorflow_backend.py:66: The name tf.get_default_graph is deprecated. Please use tf.compat.v1.get_default_graph instead.

WARNING:tensorflow:From /usr/local/lib/python3.6/dist-packages/keras/backend/tensorflow_backend.py:541: The name tf.placeholder is deprecated. Please use tf.compat.v1.placeholder instead.

WARNING:tensorflow:From /usr/local/lib/python3.6/dist-packages/keras/backend/tensorflow_backend.py:4409: The name tf.random_normal is deprecated. Please use tf.random.normal instead.

WARNING:tensorflow:From /usr/local/lib/python3.6/dist-packages/keras/optimizers.py:793: The name tf.train.Optimizer is deprecated. Please use tf.compat.v1.train.Optimizer instead.

WARNING:tensorflow:From /usr/local/lib/python3.6/dist-packages/keras/backend/tensorflow_backend.py:3576: The name tf.log is deprecated. Please use tf.math.log instead.

WARNING:tensorflow:From /usr/local/lib/python3.6/dist-packages/tensorflow_core/python/ops/math_grad.py:1424: where (from tensorflow.python.ops.array_ops) is deprecated and will be removed in a future version.
Instructions for updating:
Use tf.where in 2.0, which has the same broadcast rule as np.where
WARNING:tensorflow:From /usr/local/lib/python3.6/dist-packages/keras/backend/tensorflow_backend.py:1033: The name tf.assign_add is deprecated. Please use tf.compat.v1.assign_add instead.

WARNING:tensorflow:From /usr/local/lib/python3.6/dist-packages/keras/backend/tensorflow_backend.py:1020: The name tf.assign is deprecated. Please use tf.compat.v1.assign instead.

WARNING:tensorflow:From /usr/local/lib/python3.6/dist-packages/keras/backend/tensorflow_backend.py:3005: The name tf.Session is deprecated. Please use tf.compat.v1.Session instead.

Train on 60000 samples, validate on 10000 samples
Epoch 1/300
WARNING:tensorflow:From /usr/local/lib/python3.6/dist-packages/keras/backend/tensorflow_backend.py:190: The name tf.get_default_session is deprecated. Please use tf.compat.v1.get_default_session instead.

WARNING:tensorflow:From /usr/local/lib/python3.6/dist-packages/keras/backend/tensorflow_backend.py:197: The name tf.ConfigProto is deprecated. Please use tf.compat.v1.ConfigProto instead.

WARNING:tensorflow:From /usr/local/lib/python3.6/dist-packages/keras/backend/tensorflow_backend.py:207: The name tf.global_variables is deprecated. Please use tf.compat.v1.global_variables instead.

WARNING:tensorflow:From /usr/local/lib/python3.6/dist-packages/keras/backend/tensorflow_backend.py:216: The name tf.is_variable_initialized is deprecated. Please use tf.compat.v1.is_variable_initialized instead.

WARNING:tensorflow:From /usr/local/lib/python3.6/dist-packages/keras/backend/tensorflow_backend.py:223: The name tf.variables_initializer is deprecated. Please use tf.compat.v1.variables_initializer instead.

 - 4s - loss: 0.5222 - acc: 0.8570 - val_loss: 0.2381 - val_acc: 0.9323
Epoch 2/300
 - 3s - loss: 0.2010 - acc: 0.9439 - val_loss: 0.1645 - val_acc: 0.9527
Epoch 3/300
 - 3s - loss: 0.1454 - acc: 0.9591 - val_loss: 0.1331 - val_acc: 0.9596
Epoch 4/300
 - 3s - loss: 0.1122 - acc: 0.9687 - val_loss: 0.1090 - val_acc: 0.9674
Epoch 5/300
 - 3s - loss: 0.0916 - acc: 0.9743 - val_loss: 0.0952 - val_acc: 0.9723
Epoch 6/300
 - 3s - loss: 0.0740 - acc: 0.9798 - val_loss: 0.0843 - val_acc: 0.9751
Epoch 7/300
 - 3s - loss: 0.0617 - acc: 0.9832 - val_loss: 0.0810 - val_acc: 0.9748
Epoch 8/300
 - 3s - loss: 0.0525 - acc: 0.9861 - val_loss: 0.0750 - val_acc: 0.9779
Epoch 9/300
 - 3s - loss: 0.0438 - acc: 0.9888 - val_loss: 0.0722 - val_acc: 0.9783
Epoch 10/300
 - 3s - loss: 0.0372 - acc: 0.9908 - val_loss: 0.0659 - val_acc: 0.9795
Epoch 11/300
 - 3s - loss: 0.0317 - acc: 0.9925 - val_loss: 0.0645 - val_acc: 0.9806
Epoch 12/300
 - 3s - loss: 0.0277 - acc: 0.9937 - val_loss: 0.0628 - val_acc: 0.9804
Epoch 13/300
 - 3s - loss: 0.0244 - acc: 0.9947 - val_loss: 0.0604 - val_acc: 0.9819
Epoch 14/300
 - 3s - loss: 0.0206 - acc: 0.9961 - val_loss: 0.0594 - val_acc: 0.9817
Epoch 15/300
 - 3s - loss: 0.0179 - acc: 0.9968 - val_loss: 0.0619 - val_acc: 0.9804
Epoch 16/300
 - 3s - loss: 0.0153 - acc: 0.9974 - val_loss: 0.0596 - val_acc: 0.9816
Epoch 17/300
 - 3s - loss: 0.0132 - acc: 0.9983 - val_loss: 0.0590 - val_acc: 0.9816
Epoch 18/300
 - 3s - loss: 0.0116 - acc: 0.9987 - val_loss: 0.0605 - val_acc: 0.9810
Epoch 19/300
 - 3s - loss: 0.0102 - acc: 0.9989 - val_loss: 0.0587 - val_acc: 0.9815
Epoch 20/300
 - 3s - loss: 0.0088 - acc: 0.9993 - val_loss: 0.0572 - val_acc: 0.9827
Epoch 21/300
 - 3s - loss: 0.0078 - acc: 0.9994 - val_loss: 0.0589 - val_acc: 0.9825
Epoch 22/300
 - 3s - loss: 0.0068 - acc: 0.9995 - val_loss: 0.0581 - val_acc: 0.9828
Epoch 23/300
 - 3s - loss: 0.0062 - acc: 0.9996 - val_loss: 0.0597 - val_acc: 0.9820
Epoch 24/300
 - 3s - loss: 0.0054 - acc: 0.9997 - val_loss: 0.0585 - val_acc: 0.9828
Epoch 25/300
 - 3s - loss: 0.0049 - acc: 0.9998 - val_loss: 0.0593 - val_acc: 0.9822
Epoch 26/300
 - 3s - loss: 0.0043 - acc: 0.9999 - val_loss: 0.0604 - val_acc: 0.9821
Epoch 27/300
 - 3s - loss: 0.0039 - acc: 0.9999 - val_loss: 0.0606 - val_acc: 0.9819
Epoch 28/300
 - 3s - loss: 0.0036 - acc: 0.9999 - val_loss: 0.0598 - val_acc: 0.9822
Epoch 29/300
 - 3s - loss: 0.0032 - acc: 0.9999 - val_loss: 0.0599 - val_acc: 0.9829
Epoch 30/300
 - 3s - loss: 0.0029 - acc: 1.0000 - val_loss: 0.0608 - val_acc: 0.9818
Epoch 31/300
 - 3s - loss: 0.0027 - acc: 1.0000 - val_loss: 0.0615 - val_acc: 0.9821
Epoch 32/300
 - 3s - loss: 0.0024 - acc: 1.0000 - val_loss: 0.0612 - val_acc: 0.9823
Epoch 33/300
 - 3s - loss: 0.0023 - acc: 1.0000 - val_loss: 0.0623 - val_acc: 0.9816
Epoch 34/300
 - 3s - loss: 0.0020 - acc: 1.0000 - val_loss: 0.0611 - val_acc: 0.9825
Epoch 35/300
 - 3s - loss: 0.0019 - acc: 1.0000 - val_loss: 0.0622 - val_acc: 0.9823
Epoch 36/300
 - 3s - loss: 0.0017 - acc: 1.0000 - val_loss: 0.0624 - val_acc: 0.9825
Epoch 37/300
 - 3s - loss: 0.0016 - acc: 1.0000 - val_loss: 0.0625 - val_acc: 0.9823
Epoch 38/300
 - 3s - loss: 0.0015 - acc: 1.0000 - val_loss: 0.0625 - val_acc: 0.9827
Epoch 39/300
 - 3s - loss: 0.0014 - acc: 1.0000 - val_loss: 0.0639 - val_acc: 0.9822
Epoch 40/300
 - 3s - loss: 0.0013 - acc: 1.0000 - val_loss: 0.0632 - val_acc: 0.9832
Epoch 41/300
 - 3s - loss: 0.0012 - acc: 1.0000 - val_loss: 0.0638 - val_acc: 0.9827
Epoch 42/300
 - 3s - loss: 0.0011 - acc: 1.0000 - val_loss: 0.0632 - val_acc: 0.9830
Epoch 43/300
 - 3s - loss: 0.0011 - acc: 1.0000 - val_loss: 0.0647 - val_acc: 0.9824
Epoch 44/300
 - 3s - loss: 0.0010 - acc: 1.0000 - val_loss: 0.0643 - val_acc: 0.9827
Epoch 45/300
 - 3s - loss: 9.6194e-04 - acc: 1.0000 - val_loss: 0.0647 - val_acc: 0.9825
Epoch 46/300
 - 3s - loss: 9.0778e-04 - acc: 1.0000 - val_loss: 0.0649 - val_acc: 0.9828
Epoch 47/300
 - 3s - loss: 8.3689e-04 - acc: 1.0000 - val_loss: 0.0655 - val_acc: 0.9826
Epoch 48/300
 - 3s - loss: 8.0184e-04 - acc: 1.0000 - val_loss: 0.0653 - val_acc: 0.9827
Epoch 49/300
 - 3s - loss: 7.5783e-04 - acc: 1.0000 - val_loss: 0.0654 - val_acc: 0.9829
Epoch 50/300
 - 3s - loss: 7.0835e-04 - acc: 1.0000 - val_loss: 0.0662 - val_acc: 0.9829
Epoch 51/300
 - 3s - loss: 6.7129e-04 - acc: 1.0000 - val_loss: 0.0663 - val_acc: 0.9829
Epoch 52/300
 - 3s - loss: 6.4625e-04 - acc: 1.0000 - val_loss: 0.0665 - val_acc: 0.9834
Epoch 53/300
 - 3s - loss: 6.0120e-04 - acc: 1.0000 - val_loss: 0.0664 - val_acc: 0.9830
Epoch 54/300
 - 3s - loss: 5.8912e-04 - acc: 1.0000 - val_loss: 0.0668 - val_acc: 0.9825
Epoch 55/300
 - 3s - loss: 5.4232e-04 - acc: 1.0000 - val_loss: 0.0676 - val_acc: 0.9826
Epoch 56/300
 - 3s - loss: 5.1240e-04 - acc: 1.0000 - val_loss: 0.0674 - val_acc: 0.9825
Epoch 57/300
 - 3s - loss: 4.8981e-04 - acc: 1.0000 - val_loss: 0.0678 - val_acc: 0.9831
Epoch 58/300
 - 3s - loss: 4.6388e-04 - acc: 1.0000 - val_loss: 0.0675 - val_acc: 0.9833
Epoch 59/300
 - 3s - loss: 4.4094e-04 - acc: 1.0000 - val_loss: 0.0680 - val_acc: 0.9829
Epoch 60/300
 - 3s - loss: 4.2229e-04 - acc: 1.0000 - val_loss: 0.0683 - val_acc: 0.9832
Epoch 61/300
 - 3s - loss: 4.0432e-04 - acc: 1.0000 - val_loss: 0.0681 - val_acc: 0.9832
Epoch 62/300
 - 3s - loss: 3.8754e-04 - acc: 1.0000 - val_loss: 0.0689 - val_acc: 0.9824
Epoch 63/300
 - 3s - loss: 3.6442e-04 - acc: 1.0000 - val_loss: 0.0687 - val_acc: 0.9826
Epoch 64/300
 - 3s - loss: 3.4488e-04 - acc: 1.0000 - val_loss: 0.0694 - val_acc: 0.9830
Epoch 65/300
 - 3s - loss: 3.3590e-04 - acc: 1.0000 - val_loss: 0.0694 - val_acc: 0.9829
Epoch 66/300
 - 3s - loss: 3.1757e-04 - acc: 1.0000 - val_loss: 0.0698 - val_acc: 0.9824
Epoch 67/300
 - 3s - loss: 3.0166e-04 - acc: 1.0000 - val_loss: 0.0702 - val_acc: 0.9830
Epoch 68/300
 - 3s - loss: 2.8783e-04 - acc: 1.0000 - val_loss: 0.0699 - val_acc: 0.9829
Epoch 69/300
 - 3s - loss: 2.7600e-04 - acc: 1.0000 - val_loss: 0.0699 - val_acc: 0.9830
Epoch 70/300
 - 3s - loss: 2.6438e-04 - acc: 1.0000 - val_loss: 0.0706 - val_acc: 0.9828
Epoch 71/300
 - 3s - loss: 2.5033e-04 - acc: 1.0000 - val_loss: 0.0706 - val_acc: 0.9828
Epoch 72/300
 - 3s - loss: 2.4264e-04 - acc: 1.0000 - val_loss: 0.0717 - val_acc: 0.9828
Epoch 73/300
 - 3s - loss: 2.3041e-04 - acc: 1.0000 - val_loss: 0.0713 - val_acc: 0.9830
Epoch 74/300
 - 3s - loss: 2.2231e-04 - acc: 1.0000 - val_loss: 0.0716 - val_acc: 0.9830
Epoch 75/300
 - 3s - loss: 2.1054e-04 - acc: 1.0000 - val_loss: 0.0719 - val_acc: 0.9832
Epoch 76/300
 - 3s - loss: 2.0212e-04 - acc: 1.0000 - val_loss: 0.0715 - val_acc: 0.9832
Epoch 77/300
 - 3s - loss: 1.9441e-04 - acc: 1.0000 - val_loss: 0.0718 - val_acc: 0.9834
Epoch 78/300
 - 3s - loss: 1.8792e-04 - acc: 1.0000 - val_loss: 0.0721 - val_acc: 0.9831
Epoch 79/300
 - 3s - loss: 1.7854e-04 - acc: 1.0000 - val_loss: 0.0725 - val_acc: 0.9830
Epoch 80/300
 - 3s - loss: 1.7123e-04 - acc: 1.0000 - val_loss: 0.0730 - val_acc: 0.9830
Epoch 81/300
 - 3s - loss: 1.6479e-04 - acc: 1.0000 - val_loss: 0.0732 - val_acc: 0.9833
Epoch 82/300
 - 3s - loss: 1.5884e-04 - acc: 1.0000 - val_loss: 0.0733 - val_acc: 0.9832
Epoch 83/300
 - 3s - loss: 1.5171e-04 - acc: 1.0000 - val_loss: 0.0732 - val_acc: 0.9830
Epoch 84/300
 - 3s - loss: 1.4709e-04 - acc: 1.0000 - val_loss: 0.0736 - val_acc: 0.9833
Epoch 85/300
 - 3s - loss: 1.3951e-04 - acc: 1.0000 - val_loss: 0.0739 - val_acc: 0.9832
Epoch 86/300
 - 3s - loss: 1.3498e-04 - acc: 1.0000 - val_loss: 0.0739 - val_acc: 0.9831
Epoch 87/300
 - 3s - loss: 1.2849e-04 - acc: 1.0000 - val_loss: 0.0740 - val_acc: 0.9830
Epoch 88/300
 - 3s - loss: 1.2299e-04 - acc: 1.0000 - val_loss: 0.0747 - val_acc: 0.9831
Epoch 89/300
 - 3s - loss: 1.1987e-04 - acc: 1.0000 - val_loss: 0.0745 - val_acc: 0.9830
Epoch 90/300
 - 3s - loss: 1.1488e-04 - acc: 1.0000 - val_loss: 0.0749 - val_acc: 0.9831
Epoch 91/300
 - 3s - loss: 1.0998e-04 - acc: 1.0000 - val_loss: 0.0754 - val_acc: 0.9833
Epoch 92/300
 - 3s - loss: 1.0693e-04 - acc: 1.0000 - val_loss: 0.0753 - val_acc: 0.9831
Epoch 93/300
 - 3s - loss: 1.0226e-04 - acc: 1.0000 - val_loss: 0.0754 - val_acc: 0.9832
Epoch 94/300
 - 3s - loss: 9.8189e-05 - acc: 1.0000 - val_loss: 0.0756 - val_acc: 0.9834
Epoch 95/300
 - 3s - loss: 9.4611e-05 - acc: 1.0000 - val_loss: 0.0755 - val_acc: 0.9830
Epoch 96/300
 - 3s - loss: 9.1917e-05 - acc: 1.0000 - val_loss: 0.0766 - val_acc: 0.9833
Epoch 97/300
 - 3s - loss: 8.7415e-05 - acc: 1.0000 - val_loss: 0.0763 - val_acc: 0.9833
Epoch 98/300
 - 3s - loss: 8.4045e-05 - acc: 1.0000 - val_loss: 0.0766 - val_acc: 0.9829
Epoch 99/300
 - 3s - loss: 8.0744e-05 - acc: 1.0000 - val_loss: 0.0767 - val_acc: 0.9828
Epoch 100/300
 - 3s - loss: 7.8602e-05 - acc: 1.0000 - val_loss: 0.0767 - val_acc: 0.9831
Epoch 101/300
 - 3s - loss: 7.5436e-05 - acc: 1.0000 - val_loss: 0.0775 - val_acc: 0.9829
Epoch 102/300
 - 3s - loss: 7.2617e-05 - acc: 1.0000 - val_loss: 0.0774 - val_acc: 0.9833
Epoch 103/300
 - 3s - loss: 6.9956e-05 - acc: 1.0000 - val_loss: 0.0776 - val_acc: 0.9834
Epoch 104/300
 - 3s - loss: 6.7348e-05 - acc: 1.0000 - val_loss: 0.0775 - val_acc: 0.9835
Epoch 105/300
 - 3s - loss: 6.4409e-05 - acc: 1.0000 - val_loss: 0.0780 - val_acc: 0.9835
Epoch 106/300
 - 3s - loss: 6.2338e-05 - acc: 1.0000 - val_loss: 0.0779 - val_acc: 0.9834
Epoch 107/300
 - 3s - loss: 6.0230e-05 - acc: 1.0000 - val_loss: 0.0787 - val_acc: 0.9834
Epoch 108/300
 - 3s - loss: 5.7820e-05 - acc: 1.0000 - val_loss: 0.0789 - val_acc: 0.9834
Epoch 109/300
 - 3s - loss: 5.5781e-05 - acc: 1.0000 - val_loss: 0.0790 - val_acc: 0.9833
Epoch 110/300
 - 3s - loss: 5.3571e-05 - acc: 1.0000 - val_loss: 0.0788 - val_acc: 0.9833
Epoch 111/300
 - 3s - loss: 5.1880e-05 - acc: 1.0000 - val_loss: 0.0794 - val_acc: 0.9832
Epoch 112/300
 - 3s - loss: 4.9978e-05 - acc: 1.0000 - val_loss: 0.0794 - val_acc: 0.9833
Epoch 113/300
 - 3s - loss: 4.7937e-05 - acc: 1.0000 - val_loss: 0.0796 - val_acc: 0.9833
Epoch 114/300
 - 3s - loss: 4.6254e-05 - acc: 1.0000 - val_loss: 0.0795 - val_acc: 0.9830
Epoch 115/300
 - 3s - loss: 4.4681e-05 - acc: 1.0000 - val_loss: 0.0793 - val_acc: 0.9836
Epoch 116/300
 - 3s - loss: 4.3043e-05 - acc: 1.0000 - val_loss: 0.0802 - val_acc: 0.9832
Epoch 117/300
 - 3s - loss: 4.1942e-05 - acc: 1.0000 - val_loss: 0.0800 - val_acc: 0.9831
Epoch 118/300
 - 3s - loss: 4.0442e-05 - acc: 1.0000 - val_loss: 0.0804 - val_acc: 0.9834
Epoch 119/300
 - 3s - loss: 3.8788e-05 - acc: 1.0000 - val_loss: 0.0805 - val_acc: 0.9836
Epoch 120/300
 - 3s - loss: 3.7390e-05 - acc: 1.0000 - val_loss: 0.0810 - val_acc: 0.9832
Epoch 121/300
 - 3s - loss: 3.5820e-05 - acc: 1.0000 - val_loss: 0.0813 - val_acc: 0.9835
Epoch 122/300
 - 3s - loss: 3.4850e-05 - acc: 1.0000 - val_loss: 0.0814 - val_acc: 0.9832
Epoch 123/300
 - 3s - loss: 3.3479e-05 - acc: 1.0000 - val_loss: 0.0809 - val_acc: 0.9834
Epoch 124/300
 - 3s - loss: 3.2486e-05 - acc: 1.0000 - val_loss: 0.0818 - val_acc: 0.9836
Epoch 125/300
 - 3s - loss: 3.1350e-05 - acc: 1.0000 - val_loss: 0.0816 - val_acc: 0.9832
Epoch 126/300
 - 3s - loss: 3.0118e-05 - acc: 1.0000 - val_loss: 0.0823 - val_acc: 0.9834
Epoch 127/300
 - 3s - loss: 2.9347e-05 - acc: 1.0000 - val_loss: 0.0815 - val_acc: 0.9834
Epoch 128/300
 - 3s - loss: 2.8244e-05 - acc: 1.0000 - val_loss: 0.0824 - val_acc: 0.9833
Epoch 129/300
 - 3s - loss: 2.7450e-05 - acc: 1.0000 - val_loss: 0.0825 - val_acc: 0.9834
Epoch 130/300
 - 3s - loss: 2.6489e-05 - acc: 1.0000 - val_loss: 0.0824 - val_acc: 0.9835
Epoch 131/300
 - 3s - loss: 2.5162e-05 - acc: 1.0000 - val_loss: 0.0826 - val_acc: 0.9834
Epoch 132/300
 - 3s - loss: 2.4514e-05 - acc: 1.0000 - val_loss: 0.0831 - val_acc: 0.9835
Epoch 133/300
 - 3s - loss: 2.3663e-05 - acc: 1.0000 - val_loss: 0.0831 - val_acc: 0.9834
Epoch 134/300
 - 3s - loss: 2.2848e-05 - acc: 1.0000 - val_loss: 0.0835 - val_acc: 0.9836
Epoch 135/300
 - 3s - loss: 2.2102e-05 - acc: 1.0000 - val_loss: 0.0831 - val_acc: 0.9834
Epoch 136/300
 - 3s - loss: 2.1387e-05 - acc: 1.0000 - val_loss: 0.0837 - val_acc: 0.9835
Epoch 137/300
 - 3s - loss: 2.0636e-05 - acc: 1.0000 - val_loss: 0.0836 - val_acc: 0.9836
Epoch 138/300
 - 3s - loss: 1.9981e-05 - acc: 1.0000 - val_loss: 0.0840 - val_acc: 0.9833
Epoch 139/300
 - 3s - loss: 1.9222e-05 - acc: 1.0000 - val_loss: 0.0841 - val_acc: 0.9837
Epoch 140/300
 - 3s - loss: 1.8659e-05 - acc: 1.0000 - val_loss: 0.0841 - val_acc: 0.9836
Epoch 141/300
 - 3s - loss: 1.7933e-05 - acc: 1.0000 - val_loss: 0.0849 - val_acc: 0.9836
Epoch 142/300
 - 3s - loss: 1.7273e-05 - acc: 1.0000 - val_loss: 0.0847 - val_acc: 0.9833
Epoch 143/300
 - 3s - loss: 1.6808e-05 - acc: 1.0000 - val_loss: 0.0850 - val_acc: 0.9837
Epoch 144/300
 - 3s - loss: 1.6208e-05 - acc: 1.0000 - val_loss: 0.0854 - val_acc: 0.9837
Epoch 145/300
 - 3s - loss: 1.5627e-05 - acc: 1.0000 - val_loss: 0.0851 - val_acc: 0.9838
Epoch 146/300
 - 3s - loss: 1.5067e-05 - acc: 1.0000 - val_loss: 0.0855 - val_acc: 0.9837
Epoch 147/300
 - 3s - loss: 1.4659e-05 - acc: 1.0000 - val_loss: 0.0857 - val_acc: 0.9835
Epoch 148/300
 - 3s - loss: 1.4082e-05 - acc: 1.0000 - val_loss: 0.0865 - val_acc: 0.9835
Epoch 149/300
 - 3s - loss: 1.3644e-05 - acc: 1.0000 - val_loss: 0.0866 - val_acc: 0.9835
Epoch 150/300
 - 3s - loss: 1.3211e-05 - acc: 1.0000 - val_loss: 0.0863 - val_acc: 0.9836
Epoch 151/300
 - 3s - loss: 1.2773e-05 - acc: 1.0000 - val_loss: 0.0864 - val_acc: 0.9834
Epoch 152/300
 - 3s - loss: 1.2374e-05 - acc: 1.0000 - val_loss: 0.0865 - val_acc: 0.9837
Epoch 153/300
 - 3s - loss: 1.1972e-05 - acc: 1.0000 - val_loss: 0.0865 - val_acc: 0.9838
Epoch 154/300
 - 3s - loss: 1.1606e-05 - acc: 1.0000 - val_loss: 0.0870 - val_acc: 0.9837
Epoch 155/300
 - 3s - loss: 1.1133e-05 - acc: 1.0000 - val_loss: 0.0871 - val_acc: 0.9837
Epoch 156/300
 - 3s - loss: 1.0725e-05 - acc: 1.0000 - val_loss: 0.0874 - val_acc: 0.9835
Epoch 157/300
 - 3s - loss: 1.0437e-05 - acc: 1.0000 - val_loss: 0.0876 - val_acc: 0.9836
Epoch 158/300
 - 3s - loss: 1.0004e-05 - acc: 1.0000 - val_loss: 0.0877 - val_acc: 0.9835
Epoch 159/300
 - 3s - loss: 9.7196e-06 - acc: 1.0000 - val_loss: 0.0878 - val_acc: 0.9837
Epoch 160/300
 - 3s - loss: 9.4301e-06 - acc: 1.0000 - val_loss: 0.0880 - val_acc: 0.9837
Epoch 161/300
 - 3s - loss: 9.1314e-06 - acc: 1.0000 - val_loss: 0.0879 - val_acc: 0.9835
Epoch 162/300
 - 3s - loss: 8.8318e-06 - acc: 1.0000 - val_loss: 0.0882 - val_acc: 0.9835
Epoch 163/300
 - 3s - loss: 8.5024e-06 - acc: 1.0000 - val_loss: 0.0882 - val_acc: 0.9836
Epoch 164/300
 - 3s - loss: 8.2271e-06 - acc: 1.0000 - val_loss: 0.0885 - val_acc: 0.9836
Epoch 165/300
 - 3s - loss: 7.9456e-06 - acc: 1.0000 - val_loss: 0.0889 - val_acc: 0.9835
Epoch 166/300
 - 3s - loss: 7.7393e-06 - acc: 1.0000 - val_loss: 0.0892 - val_acc: 0.9837
Epoch 167/300
 - 3s - loss: 7.5925e-06 - acc: 1.0000 - val_loss: 0.0891 - val_acc: 0.9839
Epoch 168/300
 - 3s - loss: 7.2755e-06 - acc: 1.0000 - val_loss: 0.0894 - val_acc: 0.9836
Epoch 169/300
 - 3s - loss: 7.0094e-06 - acc: 1.0000 - val_loss: 0.0895 - val_acc: 0.9836
Epoch 170/300
 - 3s - loss: 6.7810e-06 - acc: 1.0000 - val_loss: 0.0899 - val_acc: 0.9838
Epoch 171/300
 - 3s - loss: 6.5739e-06 - acc: 1.0000 - val_loss: 0.0898 - val_acc: 0.9834
Epoch 172/300
 - 3s - loss: 6.3367e-06 - acc: 1.0000 - val_loss: 0.0898 - val_acc: 0.9836
Epoch 173/300
 - 3s - loss: 6.1390e-06 - acc: 1.0000 - val_loss: 0.0899 - val_acc: 0.9837
Epoch 174/300
 - 4s - loss: 5.9376e-06 - acc: 1.0000 - val_loss: 0.0902 - val_acc: 0.9833
Epoch 175/300
 - 3s - loss: 5.8013e-06 - acc: 1.0000 - val_loss: 0.0901 - val_acc: 0.9837
Epoch 176/300
 - 3s - loss: 5.5484e-06 - acc: 1.0000 - val_loss: 0.0908 - val_acc: 0.9838
Epoch 177/300
 - 3s - loss: 5.4190e-06 - acc: 1.0000 - val_loss: 0.0906 - val_acc: 0.9837
Epoch 178/300
 - 3s - loss: 5.2406e-06 - acc: 1.0000 - val_loss: 0.0909 - val_acc: 0.9838
Epoch 179/300
 - 3s - loss: 5.0728e-06 - acc: 1.0000 - val_loss: 0.0912 - val_acc: 0.9837
Epoch 180/300
 - 3s - loss: 4.9085e-06 - acc: 1.0000 - val_loss: 0.0911 - val_acc: 0.9837
Epoch 181/300
 - 3s - loss: 4.7424e-06 - acc: 1.0000 - val_loss: 0.0913 - val_acc: 0.9839
Epoch 182/300
 - 3s - loss: 4.5958e-06 - acc: 1.0000 - val_loss: 0.0914 - val_acc: 0.9838
Epoch 183/300
 - 3s - loss: 4.4556e-06 - acc: 1.0000 - val_loss: 0.0920 - val_acc: 0.9838
Epoch 184/300
 - 3s - loss: 4.3136e-06 - acc: 1.0000 - val_loss: 0.0920 - val_acc: 0.9837
Epoch 185/300
 - 3s - loss: 4.1718e-06 - acc: 1.0000 - val_loss: 0.0917 - val_acc: 0.9837
Epoch 186/300
 - 3s - loss: 4.0398e-06 - acc: 1.0000 - val_loss: 0.0921 - val_acc: 0.9835
Epoch 187/300
 - 3s - loss: 3.9448e-06 - acc: 1.0000 - val_loss: 0.0923 - val_acc: 0.9837
Epoch 188/300
 - 3s - loss: 3.7964e-06 - acc: 1.0000 - val_loss: 0.0923 - val_acc: 0.9839
Epoch 189/300
 - 3s - loss: 3.6852e-06 - acc: 1.0000 - val_loss: 0.0925 - val_acc: 0.9837
Epoch 190/300
 - 3s - loss: 3.5655e-06 - acc: 1.0000 - val_loss: 0.0928 - val_acc: 0.9838
Epoch 191/300
 - 3s - loss: 3.4688e-06 - acc: 1.0000 - val_loss: 0.0928 - val_acc: 0.9837
Epoch 192/300
 - 3s - loss: 3.3602e-06 - acc: 1.0000 - val_loss: 0.0931 - val_acc: 0.9837
Epoch 193/300
 - 3s - loss: 3.2580e-06 - acc: 1.0000 - val_loss: 0.0931 - val_acc: 0.9835
Epoch 194/300
 - 3s - loss: 3.1606e-06 - acc: 1.0000 - val_loss: 0.0930 - val_acc: 0.9834
Epoch 195/300
 - 3s - loss: 3.0486e-06 - acc: 1.0000 - val_loss: 0.0934 - val_acc: 0.9836
Epoch 196/300
 - 3s - loss: 2.9646e-06 - acc: 1.0000 - val_loss: 0.0936 - val_acc: 0.9838
Epoch 197/300
 - 3s - loss: 2.8880e-06 - acc: 1.0000 - val_loss: 0.0940 - val_acc: 0.9837
Epoch 198/300
 - 3s - loss: 2.8002e-06 - acc: 1.0000 - val_loss: 0.0937 - val_acc: 0.9837
Epoch 199/300
 - 3s - loss: 2.7058e-06 - acc: 1.0000 - val_loss: 0.0940 - val_acc: 0.9836
Epoch 200/300
 - 3s - loss: 2.6190e-06 - acc: 1.0000 - val_loss: 0.0940 - val_acc: 0.9836
Epoch 201/300
 - 3s - loss: 2.5547e-06 - acc: 1.0000 - val_loss: 0.0945 - val_acc: 0.9835
Epoch 202/300
 - 3s - loss: 2.4747e-06 - acc: 1.0000 - val_loss: 0.0946 - val_acc: 0.9838
Epoch 203/300
 - 3s - loss: 2.3913e-06 - acc: 1.0000 - val_loss: 0.0949 - val_acc: 0.9837
Epoch 204/300
 - 3s - loss: 2.3268e-06 - acc: 1.0000 - val_loss: 0.0946 - val_acc: 0.9836
Epoch 205/300
 - 3s - loss: 2.2548e-06 - acc: 1.0000 - val_loss: 0.0947 - val_acc: 0.9836
Epoch 206/300
 - 3s - loss: 2.1882e-06 - acc: 1.0000 - val_loss: 0.0948 - val_acc: 0.9835
Epoch 207/300
 - 3s - loss: 2.1258e-06 - acc: 1.0000 - val_loss: 0.0951 - val_acc: 0.9835
Epoch 208/300
 - 3s - loss: 2.0708e-06 - acc: 1.0000 - val_loss: 0.0954 - val_acc: 0.9834
Epoch 209/300
 - 3s - loss: 2.0087e-06 - acc: 1.0000 - val_loss: 0.0956 - val_acc: 0.9836
Epoch 210/300
 - 3s - loss: 1.9515e-06 - acc: 1.0000 - val_loss: 0.0957 - val_acc: 0.9837
Epoch 211/300
 - 3s - loss: 1.8939e-06 - acc: 1.0000 - val_loss: 0.0955 - val_acc: 0.9838
Epoch 212/300
 - 3s - loss: 1.8318e-06 - acc: 1.0000 - val_loss: 0.0961 - val_acc: 0.9838
Epoch 213/300
 - 3s - loss: 1.7768e-06 - acc: 1.0000 - val_loss: 0.0965 - val_acc: 0.9838
Epoch 214/300
 - 3s - loss: 1.7453e-06 - acc: 1.0000 - val_loss: 0.0965 - val_acc: 0.9837
Epoch 215/300
 - 3s - loss: 1.6773e-06 - acc: 1.0000 - val_loss: 0.0963 - val_acc: 0.9834
Epoch 216/300
 - 3s - loss: 1.6451e-06 - acc: 1.0000 - val_loss: 0.0964 - val_acc: 0.9837
Epoch 217/300
 - 3s - loss: 1.5821e-06 - acc: 1.0000 - val_loss: 0.0966 - val_acc: 0.9836
Epoch 218/300
 - 3s - loss: 1.5421e-06 - acc: 1.0000 - val_loss: 0.0968 - val_acc: 0.9835
Epoch 219/300
 - 3s - loss: 1.5006e-06 - acc: 1.0000 - val_loss: 0.0968 - val_acc: 0.9838
Epoch 220/300
 - 3s - loss: 1.4623e-06 - acc: 1.0000 - val_loss: 0.0972 - val_acc: 0.9836
Epoch 221/300
 - 3s - loss: 1.4146e-06 - acc: 1.0000 - val_loss: 0.0971 - val_acc: 0.9837
Epoch 222/300
 - 3s - loss: 1.3827e-06 - acc: 1.0000 - val_loss: 0.0972 - val_acc: 0.9837
Epoch 223/300
 - 3s - loss: 1.3375e-06 - acc: 1.0000 - val_loss: 0.0975 - val_acc: 0.9837
Epoch 224/300
 - 3s - loss: 1.3092e-06 - acc: 1.0000 - val_loss: 0.0977 - val_acc: 0.9838
Epoch 225/300
 - 3s - loss: 1.2728e-06 - acc: 1.0000 - val_loss: 0.0977 - val_acc: 0.9839
Epoch 226/300
 - 3s - loss: 1.2347e-06 - acc: 1.0000 - val_loss: 0.0978 - val_acc: 0.9834
Epoch 227/300
 - 3s - loss: 1.1935e-06 - acc: 1.0000 - val_loss: 0.0983 - val_acc: 0.9837
Epoch 228/300
 - 3s - loss: 1.1687e-06 - acc: 1.0000 - val_loss: 0.0985 - val_acc: 0.9838
Epoch 229/300
 - 3s - loss: 1.1407e-06 - acc: 1.0000 - val_loss: 0.0983 - val_acc: 0.9839
Epoch 230/300
 - 3s - loss: 1.1039e-06 - acc: 1.0000 - val_loss: 0.0986 - val_acc: 0.9834
Epoch 231/300
 - 3s - loss: 1.0760e-06 - acc: 1.0000 - val_loss: 0.0987 - val_acc: 0.9839
Epoch 232/300
 - 3s - loss: 1.0546e-06 - acc: 1.0000 - val_loss: 0.0987 - val_acc: 0.9837
Epoch 233/300
 - 3s - loss: 1.0228e-06 - acc: 1.0000 - val_loss: 0.0989 - val_acc: 0.9835
Epoch 234/300
 - 3s - loss: 9.9359e-07 - acc: 1.0000 - val_loss: 0.0990 - val_acc: 0.9836
Epoch 235/300
 - 3s - loss: 9.6863e-07 - acc: 1.0000 - val_loss: 0.0993 - val_acc: 0.9838
Epoch 236/300
 - 3s - loss: 9.4514e-07 - acc: 1.0000 - val_loss: 0.0993 - val_acc: 0.9837
Epoch 237/300
 - 3s - loss: 9.2869e-07 - acc: 1.0000 - val_loss: 0.0993 - val_acc: 0.9836
Epoch 238/300
 - 3s - loss: 8.9592e-07 - acc: 1.0000 - val_loss: 0.0995 - val_acc: 0.9836
Epoch 239/300
 - 3s - loss: 8.7460e-07 - acc: 1.0000 - val_loss: 0.0996 - val_acc: 0.9835
Epoch 240/300
 - 3s - loss: 8.5597e-07 - acc: 1.0000 - val_loss: 0.0997 - val_acc: 0.9838
Epoch 241/300
 - 3s - loss: 8.3255e-07 - acc: 1.0000 - val_loss: 0.1000 - val_acc: 0.9836
Epoch 242/300
 - 3s - loss: 8.1206e-07 - acc: 1.0000 - val_loss: 0.1000 - val_acc: 0.9836
Epoch 243/300
 - 3s - loss: 7.9508e-07 - acc: 1.0000 - val_loss: 0.1005 - val_acc: 0.9838
Epoch 244/300
 - 3s - loss: 7.7165e-07 - acc: 1.0000 - val_loss: 0.1003 - val_acc: 0.9837
Epoch 245/300
 - 3s - loss: 7.5228e-07 - acc: 1.0000 - val_loss: 0.1007 - val_acc: 0.9839
Epoch 246/300
 - 3s - loss: 7.3610e-07 - acc: 1.0000 - val_loss: 0.1003 - val_acc: 0.9836
Epoch 247/300
 - 3s - loss: 7.1769e-07 - acc: 1.0000 - val_loss: 0.1007 - val_acc: 0.9837
Epoch 248/300
 - 3s - loss: 6.9995e-07 - acc: 1.0000 - val_loss: 0.1010 - val_acc: 0.9839
Epoch 249/300
 - 3s - loss: 6.8623e-07 - acc: 1.0000 - val_loss: 0.1009 - val_acc: 0.9837
Epoch 250/300
 - 3s - loss: 6.6548e-07 - acc: 1.0000 - val_loss: 0.1012 - val_acc: 0.9834
Epoch 251/300
 - 3s - loss: 6.5409e-07 - acc: 1.0000 - val_loss: 0.1014 - val_acc: 0.9839
Epoch 252/300
 - 3s - loss: 6.4120e-07 - acc: 1.0000 - val_loss: 0.1015 - val_acc: 0.9838
Epoch 253/300
 - 3s - loss: 6.2190e-07 - acc: 1.0000 - val_loss: 0.1017 - val_acc: 0.9834
Epoch 254/300
 - 3s - loss: 6.0925e-07 - acc: 1.0000 - val_loss: 0.1020 - val_acc: 0.9835
Epoch 255/300
 - 3s - loss: 5.9585e-07 - acc: 1.0000 - val_loss: 0.1021 - val_acc: 0.9840
Epoch 256/300
 - 3s - loss: 5.8089e-07 - acc: 1.0000 - val_loss: 0.1018 - val_acc: 0.9837
Epoch 257/300
 - 3s - loss: 5.7196e-07 - acc: 1.0000 - val_loss: 0.1021 - val_acc: 0.9835
Epoch 258/300
 - 3s - loss: 5.5789e-07 - acc: 1.0000 - val_loss: 0.1025 - val_acc: 0.9838
Epoch 259/300
 - 3s - loss: 5.4679e-07 - acc: 1.0000 - val_loss: 0.1023 - val_acc: 0.9840
Epoch 260/300
 - 3s - loss: 5.3333e-07 - acc: 1.0000 - val_loss: 0.1021 - val_acc: 0.9839
Epoch 261/300
 - 3s - loss: 5.2341e-07 - acc: 1.0000 - val_loss: 0.1026 - val_acc: 0.9836
Epoch 262/300
 - 3s - loss: 5.0919e-07 - acc: 1.0000 - val_loss: 0.1027 - val_acc: 0.9838
Epoch 263/300
 - 3s - loss: 5.0080e-07 - acc: 1.0000 - val_loss: 0.1028 - val_acc: 0.9840
Epoch 264/300
 - 3s - loss: 4.8936e-07 - acc: 1.0000 - val_loss: 0.1027 - val_acc: 0.9836
Epoch 265/300
 - 3s - loss: 4.8162e-07 - acc: 1.0000 - val_loss: 0.1029 - val_acc: 0.9839
Epoch 266/300
 - 3s - loss: 4.7047e-07 - acc: 1.0000 - val_loss: 0.1031 - val_acc: 0.9838
Epoch 267/300
 - 3s - loss: 4.5974e-07 - acc: 1.0000 - val_loss: 0.1031 - val_acc: 0.9838
Epoch 268/300
 - 3s - loss: 4.5105e-07 - acc: 1.0000 - val_loss: 0.1034 - val_acc: 0.9840
Epoch 269/300
 - 3s - loss: 4.4195e-07 - acc: 1.0000 - val_loss: 0.1037 - val_acc: 0.9838
Epoch 270/300
 - 3s - loss: 4.3360e-07 - acc: 1.0000 - val_loss: 0.1036 - val_acc: 0.9838
Epoch 271/300
 - 3s - loss: 4.2704e-07 - acc: 1.0000 - val_loss: 0.1037 - val_acc: 0.9840
Epoch 272/300
 - 3s - loss: 4.1747e-07 - acc: 1.0000 - val_loss: 0.1040 - val_acc: 0.9838
Epoch 273/300
 - 3s - loss: 4.0963e-07 - acc: 1.0000 - val_loss: 0.1041 - val_acc: 0.9841
Epoch 274/300
 - 3s - loss: 4.0081e-07 - acc: 1.0000 - val_loss: 0.1045 - val_acc: 0.9839
Epoch 275/300
 - 3s - loss: 3.9514e-07 - acc: 1.0000 - val_loss: 0.1043 - val_acc: 0.9840
Epoch 276/300
 - 3s - loss: 3.8734e-07 - acc: 1.0000 - val_loss: 0.1042 - val_acc: 0.9842
Epoch 277/300
 - 3s - loss: 3.8035e-07 - acc: 1.0000 - val_loss: 0.1044 - val_acc: 0.9840
Epoch 278/300
 - 3s - loss: 3.7277e-07 - acc: 1.0000 - val_loss: 0.1045 - val_acc: 0.9840
Epoch 279/300
 - 3s - loss: 3.6763e-07 - acc: 1.0000 - val_loss: 0.1047 - val_acc: 0.9839
Epoch 280/300
 - 3s - loss: 3.6084e-07 - acc: 1.0000 - val_loss: 0.1048 - val_acc: 0.9839
Epoch 281/300
 - 3s - loss: 3.5480e-07 - acc: 1.0000 - val_loss: 0.1053 - val_acc: 0.9840
Epoch 282/300
 - 3s - loss: 3.4911e-07 - acc: 1.0000 - val_loss: 0.1050 - val_acc: 0.9838
Epoch 283/300
 - 3s - loss: 3.4319e-07 - acc: 1.0000 - val_loss: 0.1052 - val_acc: 0.9842
Epoch 284/300
 - 3s - loss: 3.3739e-07 - acc: 1.0000 - val_loss: 0.1056 - val_acc: 0.9838
Epoch 285/300
 - 3s - loss: 3.3219e-07 - acc: 1.0000 - val_loss: 0.1056 - val_acc: 0.9839
Epoch 286/300
 - 3s - loss: 3.2867e-07 - acc: 1.0000 - val_loss: 0.1054 - val_acc: 0.9838
Epoch 287/300
 - 3s - loss: 3.2200e-07 - acc: 1.0000 - val_loss: 0.1055 - val_acc: 0.9841
Epoch 288/300
 - 3s - loss: 3.1716e-07 - acc: 1.0000 - val_loss: 0.1058 - val_acc: 0.9839
Epoch 289/300
 - 3s - loss: 3.1087e-07 - acc: 1.0000 - val_loss: 0.1058 - val_acc: 0.9840
Epoch 290/300
 - 3s - loss: 3.0634e-07 - acc: 1.0000 - val_loss: 0.1059 - val_acc: 0.9842
Epoch 291/300
 - 3s - loss: 3.0191e-07 - acc: 1.0000 - val_loss: 0.1063 - val_acc: 0.9838
Epoch 292/300
 - 3s - loss: 2.9703e-07 - acc: 1.0000 - val_loss: 0.1063 - val_acc: 0.9838
Epoch 293/300
 - 3s - loss: 2.9311e-07 - acc: 1.0000 - val_loss: 0.1062 - val_acc: 0.9837
Epoch 294/300
 - 3s - loss: 2.8903e-07 - acc: 1.0000 - val_loss: 0.1064 - val_acc: 0.9840
Epoch 295/300
 - 3s - loss: 2.8474e-07 - acc: 1.0000 - val_loss: 0.1066 - val_acc: 0.9840
Epoch 296/300
 - 3s - loss: 2.8142e-07 - acc: 1.0000 - val_loss: 0.1065 - val_acc: 0.9839
Epoch 297/300
 - 3s - loss: 2.7653e-07 - acc: 1.0000 - val_loss: 0.1070 - val_acc: 0.9839
Epoch 298/300
 - 3s - loss: 2.7363e-07 - acc: 1.0000 - val_loss: 0.1070 - val_acc: 0.9841
Epoch 299/300
 - 3s - loss: 2.6964e-07 - acc: 1.0000 - val_loss: 0.1071 - val_acc: 0.9838
Epoch 300/300
 - 3s - loss: 2.6562e-07 - acc: 1.0000 - val_loss: 0.1069 - val_acc: 0.9839
Baseline Error: 1.61%

INTERPRET

In [0]:
# PLOT THE RESULTS OF THE MODEL

def plot_train_curve(history):
    """Plot training/validation accuracy and loss curves from a fit history.

    Args:
        history: The ``History`` object returned by ``model.fit``; its
            ``.history`` dict holds one list per tracked metric, indexed
            by epoch.
    """
    # Fix: standalone Keras logs the metric as 'acc'/'val_acc', while
    # tf.keras 2.x uses 'accuracy'/'val_accuracy'. Look up whichever key is
    # present so this works on both backends (the notebook banner warns that
    # Colab is switching to TF 2.x).
    hist = history.history
    accuracy = hist.get('acc', hist.get('accuracy'))
    val_accuracy = hist.get('val_acc', hist.get('val_accuracy'))
    loss = hist['loss']
    val_loss = hist['val_loss']
    epochs = range(len(accuracy))
    # Colorblind-safe palette (ColorBrewer PuOr)
    colors = ['#e66101', '#fdb863', '#b2abd2', '#5e3c99']
    with plt.style.context("ggplot"):
        plt.figure(figsize=(8, 8/1.618))
        plt.plot(epochs, accuracy, marker='o', c=colors[3], label='Training accuracy')
        plt.plot(epochs, val_accuracy, c=colors[0], label='Validation accuracy')
        plt.title('Training and validation accuracy')
        plt.legend()
        plt.figure(figsize=(8, 8/1.618))
        plt.plot(epochs, loss, marker='o', c=colors[3], label='Training loss')
        plt.plot(epochs, val_loss, c=colors[0], label='Validation loss')
        plt.title('Training and validation loss')
        plt.legend()
        plt.show()

plot_train_curve(history)
In [0]: