Neural Network test using Keras

In [1]:
%matplotlib inline
from os.path import join

import numpy as np
import matplotlib.pyplot as plt

from keras.models import Sequential
from keras.utils.np_utils import to_categorical
from keras.layers import Dense
from keras.optimizers import Adam
from keras.callbacks import ModelCheckpoint
from keras.models import load_model
Using TensorFlow backend.

Generate data

Mean subtraction: the input data is already zero-mean, so there is no need to demean it.

cf. http://cs231n.github.io/neural-networks-case-study/
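For reference, a minimal sketch of the standard preprocessing step that would apply if the data were not already centred; it is skipped here because the spiral data generated below is already distributed around the origin. X_raw is a hypothetical un-centred array used only for illustration:

# Hypothetical sketch: subtract the per-feature mean from an
# (n_samples, n_features) array before training.
X_raw = np.random.randn(300, 2) + 5.0   # placeholder, off-centre data
X_centred = X_raw - X_raw.mean(axis=0)  # each column now has zero mean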

In [2]:
N = 100 # number of points per class
D = 2 # dimensionality
K = 3 # number of classes
X = np.zeros((N * K, D)) # data matrix (each row = single example)
y = np.zeros(N * K, dtype='uint8') # class labels
for j in range(K):
  ix = range(N * j, N * (j + 1))
  r = np.linspace(0.0, 1, N) # radius
  t = np.linspace(j * 4, (j + 1) * 4, N) + np.random.randn(N) * 0.2 # theta
  X[ix] = np.c_[r * np.sin(t), r * np.cos(t)]
  y[ix] = j
In [3]:
fig, ax = plt.subplots()
ax.scatter(X[:, 0], X[:, 1], c=y, s=40, cmap=plt.cm.Spectral);
In [4]:
X[:5, :]
Out[4]:
array([[ -0.00000000e+00,   0.00000000e+00],
       [  5.42876968e-05,   1.01008642e-02],
       [  8.10273417e-03,   1.85058726e-02],
       [  5.91705102e-03,   2.97197267e-02],
       [  1.93258307e-02,   3.54823724e-02]])
In [5]:
y
Out[5]:
array([0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
       0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
       0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
       0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
       0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
       1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
       1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
       1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
       1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 2, 2, 2, 2, 2, 2,
       2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
       2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
       2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
       2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
       2], dtype=uint8)
In [6]:
print(X.shape)
print(y.shape)
(300, 2)
(300,)

Build model via Keras

Initial weights: use the default parameters, kernel_initializer='glorot_uniform' and bias_initializer='zeros'.
Regularisation: none.
Labels: the vector y needs to be converted to one-hot form with to_categorical() (see the sketch below).
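As a quick illustration of the label conversion (a minimal sketch using a few hand-picked labels), to_categorical() turns integer class labels into one-hot rows with K columns:

# to_categorical() maps each integer label to a one-hot row of length 3.
example_labels = np.array([0, 1, 2, 1])
to_categorical(example_labels, 3)
# -> [[1, 0, 0],
#     [0, 1, 0],
#     [0, 0, 1],
#     [0, 1, 0]]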

In [7]:
model = Sequential()
model.add(Dense(100, input_dim=2, activation='relu'))
model.add(Dense(3, activation='softmax'))
model.compile(optimizer=Adam(), loss='categorical_crossentropy', metrics=['accuracy'])
In [8]:
model.summary()
_________________________________________________________________
Layer (type)                 Output Shape              Param #   
=================================================================
dense_1 (Dense)              (None, 100)               300       
_________________________________________________________________
dense_2 (Dense)              (None, 3)                 303       
=================================================================
Total params: 603
Trainable params: 603
Non-trainable params: 0
_________________________________________________________________
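The parameter counts follow directly from the layer shapes: the first Dense layer has 2 × 100 weights plus 100 biases (300 parameters), and the second has 100 × 3 weights plus 3 biases (303). A minimal check in code:

# Each Dense layer stores an (n_in, n_units) weight matrix plus one bias per unit.
print(model.layers[0].count_params())  # 2 * 100 + 100 = 300
print(model.layers[1].count_params())  # 100 * 3 + 3   = 303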
In [9]:
model_checkpoint = ModelCheckpoint(filepath=join('output', 'keras_test.{epoch:02d}.hdf5'), verbose=0)
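ModelCheckpoint writes one HDF5 file per epoch into the output directory; if that directory does not already exist, the per-epoch saves can fail, so it is safest to create it up front (a minimal sketch):

import os

# Create the checkpoint directory up front so the per-epoch saves do not fail.
os.makedirs('output', exist_ok=True)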
In [10]:
history = model.fit(X, to_categorical(y, 3), batch_size=10, epochs=100, verbose=1, callbacks=[model_checkpoint])
Epoch 1/100
300/300 [==============================] - 0s 1ms/step - loss: 1.0659 - acc: 0.4800
Epoch 2/100
300/300 [==============================] - 0s 474us/step - loss: 0.9880 - acc: 0.5633
Epoch 3/100
300/300 [==============================] - 0s 456us/step - loss: 0.9245 - acc: 0.5600
Epoch 4/100
300/300 [==============================] - 0s 492us/step - loss: 0.8691 - acc: 0.5233
Epoch 5/100
300/300 [==============================] - 0s 348us/step - loss: 0.8247 - acc: 0.5367
Epoch 6/100
300/300 [==============================] - 0s 398us/step - loss: 0.7934 - acc: 0.5333
Epoch 7/100
300/300 [==============================] - 0s 311us/step - loss: 0.7661 - acc: 0.5367
Epoch 8/100
300/300 [==============================] - 0s 350us/step - loss: 0.7477 - acc: 0.5433
Epoch 9/100
300/300 [==============================] - 0s 396us/step - loss: 0.7316 - acc: 0.5500
Epoch 10/100
300/300 [==============================] - 0s 376us/step - loss: 0.7169 - acc: 0.5533
Epoch 11/100
300/300 [==============================] - 0s 344us/step - loss: 0.7018 - acc: 0.5667
Epoch 12/100
300/300 [==============================] - 0s 338us/step - loss: 0.6919 - acc: 0.5567
Epoch 13/100
300/300 [==============================] - 0s 365us/step - loss: 0.6809 - acc: 0.5700
Epoch 14/100
300/300 [==============================] - 0s 340us/step - loss: 0.6702 - acc: 0.5733
Epoch 15/100
300/300 [==============================] - 0s 416us/step - loss: 0.6587 - acc: 0.5833
Epoch 16/100
300/300 [==============================] - 0s 373us/step - loss: 0.6501 - acc: 0.6033
Epoch 17/100
300/300 [==============================] - 0s 317us/step - loss: 0.6403 - acc: 0.6033
Epoch 18/100
300/300 [==============================] - 0s 367us/step - loss: 0.6292 - acc: 0.6200
Epoch 19/100
300/300 [==============================] - 0s 292us/step - loss: 0.6210 - acc: 0.6233
Epoch 20/100
300/300 [==============================] - 0s 438us/step - loss: 0.6097 - acc: 0.6433
Epoch 21/100
300/300 [==============================] - 0s 291us/step - loss: 0.5998 - acc: 0.6533
Epoch 22/100
300/300 [==============================] - 0s 346us/step - loss: 0.5902 - acc: 0.6533
Epoch 23/100
300/300 [==============================] - 0s 339us/step - loss: 0.5809 - acc: 0.6500
Epoch 24/100
300/300 [==============================] - 0s 284us/step - loss: 0.5713 - acc: 0.6633
Epoch 25/100
300/300 [==============================] - 0s 368us/step - loss: 0.5610 - acc: 0.6800
Epoch 26/100
300/300 [==============================] - 0s 278us/step - loss: 0.5536 - acc: 0.7000
Epoch 27/100
300/300 [==============================] - 0s 362us/step - loss: 0.5436 - acc: 0.7100
Epoch 28/100
300/300 [==============================] - 0s 523us/step - loss: 0.5348 - acc: 0.7133
Epoch 29/100
300/300 [==============================] - 0s 322us/step - loss: 0.5248 - acc: 0.7333
Epoch 30/100
300/300 [==============================] - 0s 481us/step - loss: 0.5149 - acc: 0.7300
Epoch 31/100
300/300 [==============================] - 0s 282us/step - loss: 0.5045 - acc: 0.7500
Epoch 32/100
300/300 [==============================] - 0s 373us/step - loss: 0.4977 - acc: 0.7533
Epoch 33/100
300/300 [==============================] - 0s 335us/step - loss: 0.4877 - acc: 0.7633
Epoch 34/100
300/300 [==============================] - 0s 342us/step - loss: 0.4795 - acc: 0.7700
Epoch 35/100
300/300 [==============================] - 0s 334us/step - loss: 0.4698 - acc: 0.7767
Epoch 36/100
300/300 [==============================] - 0s 323us/step - loss: 0.4654 - acc: 0.7867
Epoch 37/100
300/300 [==============================] - 0s 310us/step - loss: 0.4540 - acc: 0.7900
Epoch 38/100
300/300 [==============================] - 0s 303us/step - loss: 0.4468 - acc: 0.8033
Epoch 39/100
300/300 [==============================] - 0s 332us/step - loss: 0.4387 - acc: 0.8033
Epoch 40/100
300/300 [==============================] - 0s 411us/step - loss: 0.4334 - acc: 0.8100
Epoch 41/100
300/300 [==============================] - 0s 308us/step - loss: 0.4233 - acc: 0.8100
Epoch 42/100
300/300 [==============================] - 0s 369us/step - loss: 0.4158 - acc: 0.8267
Epoch 43/100
300/300 [==============================] - 0s 362us/step - loss: 0.4087 - acc: 0.8233
Epoch 44/100
300/300 [==============================] - 0s 331us/step - loss: 0.4032 - acc: 0.8267
Epoch 45/100
300/300 [==============================] - 0s 304us/step - loss: 0.3962 - acc: 0.8300
Epoch 46/100
300/300 [==============================] - 0s 296us/step - loss: 0.3895 - acc: 0.8367
Epoch 47/100
300/300 [==============================] - 0s 349us/step - loss: 0.3828 - acc: 0.8400
Epoch 48/100
300/300 [==============================] - 0s 316us/step - loss: 0.3771 - acc: 0.8367
Epoch 49/100
300/300 [==============================] - 0s 345us/step - loss: 0.3712 - acc: 0.8433
Epoch 50/100
300/300 [==============================] - 0s 355us/step - loss: 0.3650 - acc: 0.8467
Epoch 51/100
300/300 [==============================] - 0s 339us/step - loss: 0.3585 - acc: 0.8467
Epoch 52/100
300/300 [==============================] - 0s 420us/step - loss: 0.3534 - acc: 0.8567
Epoch 53/100
300/300 [==============================] - 0s 311us/step - loss: 0.3477 - acc: 0.8500
Epoch 54/100
300/300 [==============================] - 0s 298us/step - loss: 0.3426 - acc: 0.8567
Epoch 55/100
300/300 [==============================] - 0s 333us/step - loss: 0.3369 - acc: 0.8667
Epoch 56/100
300/300 [==============================] - 0s 312us/step - loss: 0.3323 - acc: 0.8633
Epoch 57/100
300/300 [==============================] - 0s 335us/step - loss: 0.3269 - acc: 0.8567
Epoch 58/100
300/300 [==============================] - 0s 355us/step - loss: 0.3222 - acc: 0.8700
Epoch 59/100
300/300 [==============================] - 0s 358us/step - loss: 0.3155 - acc: 0.8633
Epoch 60/100
300/300 [==============================] - 0s 304us/step - loss: 0.3120 - acc: 0.8767
Epoch 61/100
300/300 [==============================] - 0s 299us/step - loss: 0.3072 - acc: 0.8667
Epoch 62/100
300/300 [==============================] - 0s 330us/step - loss: 0.3020 - acc: 0.8733
Epoch 63/100
300/300 [==============================] - 0s 419us/step - loss: 0.2982 - acc: 0.8800
Epoch 64/100
300/300 [==============================] - 0s 303us/step - loss: 0.2923 - acc: 0.8667
Epoch 65/100
300/300 [==============================] - 0s 308us/step - loss: 0.2888 - acc: 0.8700
Epoch 66/100
300/300 [==============================] - 0s 415us/step - loss: 0.2838 - acc: 0.8767
Epoch 67/100
300/300 [==============================] - 0s 346us/step - loss: 0.2802 - acc: 0.8867
Epoch 68/100
300/300 [==============================] - 0s 316us/step - loss: 0.2762 - acc: 0.8800
Epoch 69/100
300/300 [==============================] - 0s 328us/step - loss: 0.2727 - acc: 0.8867
Epoch 70/100
300/300 [==============================] - 0s 406us/step - loss: 0.2682 - acc: 0.8933
Epoch 71/100
300/300 [==============================] - 0s 317us/step - loss: 0.2657 - acc: 0.8900
Epoch 72/100
300/300 [==============================] - 0s 456us/step - loss: 0.2604 - acc: 0.8967
Epoch 73/100
300/300 [==============================] - 0s 333us/step - loss: 0.2562 - acc: 0.8967
Epoch 74/100
300/300 [==============================] - 0s 390us/step - loss: 0.2526 - acc: 0.9000
Epoch 75/100
300/300 [==============================] - 0s 401us/step - loss: 0.2509 - acc: 0.8967
Epoch 76/100
300/300 [==============================] - 0s 431us/step - loss: 0.2462 - acc: 0.9033
Epoch 77/100
300/300 [==============================] - 0s 433us/step - loss: 0.2422 - acc: 0.9000
Epoch 78/100
300/300 [==============================] - 0s 397us/step - loss: 0.2393 - acc: 0.9133
Epoch 79/100
300/300 [==============================] - 0s 486us/step - loss: 0.2367 - acc: 0.9100
Epoch 80/100
300/300 [==============================] - 0s 320us/step - loss: 0.2339 - acc: 0.9167
Epoch 81/100
300/300 [==============================] - 0s 289us/step - loss: 0.2296 - acc: 0.9133
Epoch 82/100
300/300 [==============================] - 0s 369us/step - loss: 0.2272 - acc: 0.9133
Epoch 83/100
300/300 [==============================] - 0s 424us/step - loss: 0.2242 - acc: 0.9167
Epoch 84/100
300/300 [==============================] - 0s 328us/step - loss: 0.2214 - acc: 0.9233
Epoch 85/100
300/300 [==============================] - 0s 305us/step - loss: 0.2186 - acc: 0.9400
Epoch 86/100
300/300 [==============================] - 0s 318us/step - loss: 0.2149 - acc: 0.9300
Epoch 87/100
300/300 [==============================] - 0s 329us/step - loss: 0.2120 - acc: 0.9300
Epoch 88/100
300/300 [==============================] - 0s 329us/step - loss: 0.2095 - acc: 0.9400
Epoch 89/100
300/300 [==============================] - 0s 346us/step - loss: 0.2065 - acc: 0.9433
Epoch 90/100
300/300 [==============================] - 0s 335us/step - loss: 0.2048 - acc: 0.9367
Epoch 91/100
300/300 [==============================] - 0s 345us/step - loss: 0.2013 - acc: 0.9433
Epoch 92/100
300/300 [==============================] - 0s 292us/step - loss: 0.1990 - acc: 0.9500
Epoch 93/100
300/300 [==============================] - 0s 360us/step - loss: 0.1971 - acc: 0.9400
Epoch 94/100
300/300 [==============================] - 0s 326us/step - loss: 0.1976 - acc: 0.9300
Epoch 95/100
300/300 [==============================] - 0s 427us/step - loss: 0.1938 - acc: 0.9433
Epoch 96/100
300/300 [==============================] - 0s 437us/step - loss: 0.1913 - acc: 0.9400
Epoch 97/100
300/300 [==============================] - 0s 305us/step - loss: 0.1873 - acc: 0.9467
Epoch 98/100
300/300 [==============================] - 0s 293us/step - loss: 0.1848 - acc: 0.9467
Epoch 99/100
300/300 [==============================] - 0s 438us/step - loss: 0.1831 - acc: 0.9467
Epoch 100/100
300/300 [==============================] - 0s 291us/step - loss: 0.1814 - acc: 0.9567

Result

In [11]:
fig, (ax1, ax2) = plt.subplots(ncols=2, figsize=(10, 4))
ax1.set_ylabel("Loss")
ax2.set_ylabel("Accuarcy")
ax1.set_xlabel("Epoch")
ax2.set_xlabel("Epoch")
ax1.plot(history.epoch, history.history["loss"])
ax2.plot(history.epoch, history.history["acc"]);
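Note that this Keras version records accuracy under the 'acc' key; newer Keras releases use 'accuracy' instead. A version-agnostic lookup can be sketched as:

# Pick whichever accuracy key this Keras version recorded in the history.
acc_key = 'acc' if 'acc' in history.history else 'accuracy'
ax2.plot(history.epoch, history.history[acc_key]);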

Predict

In [12]:
loaded_model = load_model(join('output', 'keras_test.100.hdf5'))
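As a quick sanity check that the checkpoint restored the trained weights (a minimal sketch that reuses the training data, since this toy example has no separate test split):

# The loss and accuracy should be close to the final training epoch above.
loss, acc = loaded_model.evaluate(X, to_categorical(y, 3), verbose=0)
print(loss, acc)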
In [13]:
def traverse(o, tree_types=(list, tuple)):
    if isinstance(o, tree_types):
        for value in o:
            for subvalue in traverse(value, tree_types):
                yield subvalue
    else:
        yield o
In [14]:
n_linspace = 100
axis_grid = np.linspace(-1.5, 1.5, n_linspace)
grid_x, grid_y = np.meshgrid(axis_grid, axis_grid)
x_test = np.array([(i, j) for i, j in zip(traverse(grid_x.tolist()), traverse(grid_y.tolist()))])
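An equivalent and more concise way to build the same test grid with NumPy alone, skipping the traverse() helper (a sketch):

# ravel() flattens both coordinate grids in the same row-major order that
# traverse() walks the nested lists, so the result matches x_test above.
x_test_alt = np.column_stack([grid_x.ravel(), grid_y.ravel()])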

Make predictions using the loaded model.

In [15]:
y_hat = loaded_model.predict(x_test)
y_hat_grid = np.argmax(y_hat, axis=1).reshape((n_linspace, n_linspace))
In [16]:
grid_x, grid_y = np.meshgrid(axis_grid, axis_grid)
fig, ax = plt.subplots()
ax.contourf(grid_x, grid_y, y_hat_grid, cmap=plt.cm.Spectral)
ax.scatter(X[:, 0], X[:, 1], c=y, s=40, edgecolors='black', cmap=plt.cm.Spectral);