
Driver Drowsiness Detection using CNN

Credit: AITS Cainvas Community

Photo by Isaac on Dribbble

According to one report, around 40% of road accidents on highways are caused by drowsy driving. This project uses a CNN architecture to detect whether a driver is drowsy or alert, based on whether both of the driver's eyes are closed (indicating drowsiness) or open.

Importing the necessary libraries

In [1]:
import numpy as np
import cv2
from tensorflow.keras.layers import Input, Conv2D, BatchNormalization, Dropout, Flatten, Dense, MaxPool2D
from tensorflow.keras.models import Model, Sequential
from tensorflow.keras.initializers import glorot_uniform
from tensorflow.keras.optimizers import Adam, SGD
import matplotlib.pyplot as plt
from sklearn.metrics import classification_report
from tensorflow.keras.preprocessing.image import ImageDataGenerator
from sklearn.model_selection import train_test_split
from tensorflow.keras.utils import to_categorical
from sklearn.preprocessing import LabelEncoder
from sklearn.metrics import classification_report, confusion_matrix
import seaborn as sns
from tensorflow.keras.regularizers import l2
import tensorflow as tf
import pandas as pd
from imutils.face_utils import FaceAligner
from imutils.face_utils import rect_to_bb
import imutils
import dlib

Loading the dataset and the test images

You can also upload your own test images.

In [2]:
!wget https://cainvas-static.s3.amazonaws.com/media/user_data/cainvas-admin/driver_drowsiness_detection_modified.zip
!unzip -qo driver_drowsiness_detection_modified.zip
--2021-07-05 11:50:27--  https://cainvas-static.s3.amazonaws.com/media/user_data/cainvas-admin/driver_drowsiness_detection_modified.zip
Resolving cainvas-static.s3.amazonaws.com (cainvas-static.s3.amazonaws.com)... 52.219.66.92
Connecting to cainvas-static.s3.amazonaws.com (cainvas-static.s3.amazonaws.com)|52.219.66.92|:443... connected.
HTTP request sent, awaiting response... 200 OK
Length: 127436131 (122M) [application/x-zip-compressed]
Saving to: ‘driver_drowsiness_detection_modified.zip.1’

driver_drowsiness_d 100%[===================>] 121.53M  97.3MB/s    in 1.2s    

2021-07-05 11:50:29 (97.3 MB/s) - ‘driver_drowsiness_detection_modified.zip.1’ saved [127436131/127436131]

Loading the Images and Labels and Preprocessing the Dataset

Input shape of the images for the CNN model - (32, 32, 3)
The dataset is split into a train set and a test set, with the test set containing 20% of the total number of images.
Labels -> 0 - Closed Eye, 1 - Open Eye

In [3]:
data = np.load('driver_drowsiness_detection/dataset_compressed.npz', allow_pickle=True)
X = data['arr_0']
Y = data['arr_1']

X = list(X)
Y = list(Y)
print(len(X))
print(len(Y))
1452
1452
In [4]:
for i in range(len(X)):
    img = X[i]
    img = cv2.resize(img, (32, 32))
    X[i] = img
    
print(len(X))
print(X[0].shape)
1452
(32, 32, 3)
In [5]:
label_encoder = LabelEncoder()
Y = label_encoder.fit_transform(Y)
print(Y.shape)
print(Y[0])
print(set(Y))
(1452,)
0
{0, 1}
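
If you want to check which original class name was mapped to 0 and which to 1, the fitted encoder keeps that information. A minimal check (a sketch; the exact class strings depend on what is stored in the .npz file):

# label_encoder.classes_[i] is the original label that was encoded as integer i
print(label_encoder.classes_)
print(label_encoder.inverse_transform([0, 1]))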
In [6]:
X = np.array(X)
Y = np.array(Y)
print(X.shape)
print(Y.shape)
(1452, 32, 32, 3)
(1452,)
In [7]:
X_train, X_test, Y_train, Y_test = train_test_split(X, Y, test_size=0.2)
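
The split above is random on every run. For reproducible results with the class proportions preserved in both sets, train_test_split also accepts random_state and stratify arguments; a minimal variant (a sketch only, the results below were produced with the split above):

# Optional: reproducible, stratified 80/20 split
X_train, X_test, Y_train, Y_test = train_test_split(X, Y, test_size=0.2, random_state=42, stratify=Y)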
In [8]:
print(X_train.shape)
print(Y_train.shape)
print(X_test.shape)
print(Y_test.shape)

Y_train = to_categorical(Y_train)
Y_test = to_categorical(Y_test)

print(X_train.shape)
print(Y_train.shape)
print(X_test.shape)
print(Y_test.shape)
(1161, 32, 32, 3)
(1161,)
(291, 32, 32, 3)
(291,)
(1161, 32, 32, 3)
(1161, 2)
(291, 32, 32, 3)
(291, 2)

Visualizing images of Closed Eye and Open Eye from the Dataset

In [9]:
figure1 = plt.figure(figsize=(5, 5))
idx_closed = np.where(Y==0)
img_closed = X[idx_closed[0][0]]
plt.imshow(img_closed)
plt.title('Image of Closed Eye representing Driver is sleeping')
plt.axis('off')
plt.show()

figure2 = plt.figure(figsize=(5, 5))
idx_open = np.where(Y==1)
img_open = X[idx_open[0][0]]
plt.imshow(img_open)
plt.title('Image of Open Eye representing Driver is not sleeping')
plt.axis('off')
plt.show()
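
Note that OpenCV stores images in BGR channel order while matplotlib expects RGB, so if the arrays in the .npz were saved in BGR the displayed colours will look swapped. A conversion before plotting fixes this (a sketch; whether it is needed depends on how the dataset was saved):

# Convert BGR -> RGB only if the stored images are in OpenCV's BGR order
plt.imshow(cv2.cvtColor(img_closed, cv2.COLOR_BGR2RGB))
plt.axis('off')
plt.show()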

Visualizing the Data Distribution in Train and Test Set

In [10]:
# Y_train is one-hot encoded at this point, so recover the integer class labels first
unique_train, count = np.unique(np.argmax(Y_train, axis=1), return_counts=True)
plt.figure(figsize=(20, 10))
sns.barplot(x=unique_train, y=count).set_title("Number of training images per category:")
plt.show()
In [11]:
# Y_test is one-hot encoded as well, so recover the integer class labels first
unique_test, count_test = np.unique(np.argmax(Y_test, axis=1), return_counts=True)
plt.figure(figsize=(20, 10))
sns.barplot(x=unique_test, y=count_test).set_title("Number of test images per category:")
plt.show()

As the bar plots show, the dataset is already balanced, so there is no need to perform any dataset balancing.
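
Had the classes been imbalanced, one common remedy is to weight the loss per class instead of resampling. A hedged sketch of how class weights could be computed and passed to model.fit (not needed for this dataset):

from sklearn.utils.class_weight import compute_class_weight

# Compute balanced class weights from the integer training labels (undoing the one-hot encoding)
train_labels = np.argmax(Y_train, axis=1)
weights = compute_class_weight(class_weight='balanced', classes=np.unique(train_labels), y=train_labels)
class_weight = dict(enumerate(weights))  # e.g. {0: w0, 1: w1}
# model.fit(..., class_weight=class_weight)  # would be added to the fit call if needed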

Defining the Model Architecture

In [12]:
def driver_drowsiness_detection_model(input_shape=(32, 32, 3)):
    model = Sequential()
    model.add(Input(shape=input_shape))
    model.add(Conv2D(32, (3, 3), padding='same', strides=(1, 1), name='conv1', activation='relu', 
                     kernel_initializer=glorot_uniform(seed=0)))
    model.add(BatchNormalization())
    model.add(Conv2D(32, (3, 3), padding='same', strides=(1, 1), name='conv2', activation='relu', 
                     kernel_initializer=glorot_uniform(seed=0)))
    model.add(BatchNormalization())
    model.add(Dropout(0.2))
    model.add(MaxPool2D((2, 2), strides=(2, 2)))
    
    model.add(Conv2D(64, (3, 3), padding='same', strides=(1, 1), name='conv3', activation='relu', 
                     kernel_initializer=glorot_uniform(seed=0)))
    model.add(BatchNormalization())
    model.add(MaxPool2D((2, 2), strides=(2, 2)))
    model.add(Conv2D(64, (3, 3), padding='same', strides=(1, 1), name='conv4', activation='relu', 
                     kernel_initializer=glorot_uniform(seed=0)))
    model.add(BatchNormalization())
    model.add(Dropout(0.3))
    model.add(MaxPool2D((2, 2), strides=(2, 2)))
    
    model.add(Conv2D(64, (3, 3), padding='same', strides=(1, 1), name='conv5', activation='relu', 
                     kernel_initializer=glorot_uniform(seed=0)))
    model.add(BatchNormalization())
    model.add(Conv2D(64, (3, 3), padding='same', strides=(1, 1), name='conv6', activation='relu', 
                     kernel_initializer=glorot_uniform(seed=0)))
    model.add(BatchNormalization())
    model.add(Conv2D(64, (3, 3), padding='same', strides=(1, 1), name='conv7', activation='relu', 
                     kernel_initializer=glorot_uniform(seed=0)))
    model.add(BatchNormalization())
    model.add(Dropout(0.4))
    model.add(MaxPool2D((2, 2), strides=(2, 2)))
    
    
    model.add(Flatten())
    model.add(Dense(128, activation='relu', kernel_initializer=glorot_uniform(seed=0), name='fc1'))
    model.add(Dropout(0.5))
    model.add(Dense(128, activation='relu', kernel_initializer=glorot_uniform(seed=0), name='fc2'))
    model.add(Dropout(0.5))
    model.add(Dense(2, activation='softmax', kernel_initializer=glorot_uniform(seed=0), name='fc3'))
    
    optimizer = Adam(0.0001)
    model.compile(optimizer=optimizer, loss='categorical_crossentropy', metrics=['accuracy'])
    return model
In [13]:
model= driver_drowsiness_detection_model(input_shape=(32, 32, 3))
model.summary()
Model: "sequential"
_________________________________________________________________
Layer (type)                 Output Shape              Param #   
=================================================================
conv1 (Conv2D)               (None, 32, 32, 32)        896       
_________________________________________________________________
batch_normalization (BatchNo (None, 32, 32, 32)        128       
_________________________________________________________________
conv2 (Conv2D)               (None, 32, 32, 32)        9248      
_________________________________________________________________
batch_normalization_1 (Batch (None, 32, 32, 32)        128       
_________________________________________________________________
dropout (Dropout)            (None, 32, 32, 32)        0         
_________________________________________________________________
max_pooling2d (MaxPooling2D) (None, 16, 16, 32)        0         
_________________________________________________________________
conv3 (Conv2D)               (None, 16, 16, 64)        18496     
_________________________________________________________________
batch_normalization_2 (Batch (None, 16, 16, 64)        256       
_________________________________________________________________
max_pooling2d_1 (MaxPooling2 (None, 8, 8, 64)          0         
_________________________________________________________________
conv4 (Conv2D)               (None, 8, 8, 64)          36928     
_________________________________________________________________
batch_normalization_3 (Batch (None, 8, 8, 64)          256       
_________________________________________________________________
dropout_1 (Dropout)          (None, 8, 8, 64)          0         
_________________________________________________________________
max_pooling2d_2 (MaxPooling2 (None, 4, 4, 64)          0         
_________________________________________________________________
conv5 (Conv2D)               (None, 4, 4, 64)          36928     
_________________________________________________________________
batch_normalization_4 (Batch (None, 4, 4, 64)          256       
_________________________________________________________________
conv6 (Conv2D)               (None, 4, 4, 64)          36928     
_________________________________________________________________
batch_normalization_5 (Batch (None, 4, 4, 64)          256       
_________________________________________________________________
conv7 (Conv2D)               (None, 4, 4, 64)          36928     
_________________________________________________________________
batch_normalization_6 (Batch (None, 4, 4, 64)          256       
_________________________________________________________________
dropout_2 (Dropout)          (None, 4, 4, 64)          0         
_________________________________________________________________
max_pooling2d_3 (MaxPooling2 (None, 2, 2, 64)          0         
_________________________________________________________________
flatten (Flatten)            (None, 256)               0         
_________________________________________________________________
fc1 (Dense)                  (None, 128)               32896     
_________________________________________________________________
dropout_3 (Dropout)          (None, 128)               0         
_________________________________________________________________
fc2 (Dense)                  (None, 128)               16512     
_________________________________________________________________
dropout_4 (Dropout)          (None, 128)               0         
_________________________________________________________________
fc3 (Dense)                  (None, 2)                 258       
=================================================================
Total params: 227,554
Trainable params: 226,786
Non-trainable params: 768
_________________________________________________________________

Training the Model

Learning rate - 0.0001
Optimizer - Adam
Number of epochs - 200 (a sketch of optional early-stopping callbacks follows below)
Batch size - 128
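
Training runs for a fixed 200 epochs here. If you would rather stop once the validation loss stops improving and keep the best weights, Keras callbacks can be added to the fit call. A sketch of an optional setup (not used for the run below; the checkpoint filename is hypothetical):

from tensorflow.keras.callbacks import EarlyStopping, ModelCheckpoint

callbacks = [
    EarlyStopping(monitor='val_loss', patience=20, restore_best_weights=True),
    ModelCheckpoint('best_drowsiness_model.h5', monitor='val_loss', save_best_only=True),  # hypothetical filename
]
# hist = model.fit(..., callbacks=callbacks)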

In [14]:
# On-the-fly augmentation: random rotations, zooms and horizontal flips
aug = ImageDataGenerator(rotation_range=20, zoom_range=0.2, horizontal_flip=True)
hist = model.fit(aug.flow(X_train, Y_train, batch_size=128), epochs=200, validation_data=(X_test, Y_test))
Epoch 1/200
10/10 [==============================] - 1s 65ms/step - loss: 1.4387 - accuracy: 0.5194 - val_loss: 0.6987 - val_accuracy: 0.5292
Epoch 2/200
10/10 [==============================] - 0s 38ms/step - loss: 1.1671 - accuracy: 0.5289 - val_loss: 0.6969 - val_accuracy: 0.5292
Epoch 3/200
10/10 [==============================] - 0s 39ms/step - loss: 1.0384 - accuracy: 0.5702 - val_loss: 0.6685 - val_accuracy: 0.5430
Epoch 4/200
10/10 [==============================] - 0s 36ms/step - loss: 0.9631 - accuracy: 0.5573 - val_loss: 0.6383 - val_accuracy: 0.5911
Epoch 5/200
10/10 [==============================] - 0s 37ms/step - loss: 0.9391 - accuracy: 0.5814 - val_loss: 0.6202 - val_accuracy: 0.6529
Epoch 6/200
10/10 [==============================] - 0s 38ms/step - loss: 0.8702 - accuracy: 0.6081 - val_loss: 0.6109 - val_accuracy: 0.7045
Epoch 7/200
10/10 [==============================] - 0s 37ms/step - loss: 0.8195 - accuracy: 0.6133 - val_loss: 0.6040 - val_accuracy: 0.7423
Epoch 8/200
10/10 [==============================] - 0s 40ms/step - loss: 0.7268 - accuracy: 0.6494 - val_loss: 0.5970 - val_accuracy: 0.7320
Epoch 9/200
10/10 [==============================] - 0s 37ms/step - loss: 0.7889 - accuracy: 0.6443 - val_loss: 0.5887 - val_accuracy: 0.7388
Epoch 10/200
10/10 [==============================] - 0s 36ms/step - loss: 0.7710 - accuracy: 0.6365 - val_loss: 0.5852 - val_accuracy: 0.7216
Epoch 11/200
10/10 [==============================] - 0s 36ms/step - loss: 0.6738 - accuracy: 0.6693 - val_loss: 0.5804 - val_accuracy: 0.7113
Epoch 12/200
10/10 [==============================] - 0s 37ms/step - loss: 0.7212 - accuracy: 0.6512 - val_loss: 0.5712 - val_accuracy: 0.7079
Epoch 13/200
10/10 [==============================] - 0s 42ms/step - loss: 0.7081 - accuracy: 0.6701 - val_loss: 0.5580 - val_accuracy: 0.7182
Epoch 14/200
10/10 [==============================] - 0s 37ms/step - loss: 0.6275 - accuracy: 0.6804 - val_loss: 0.5436 - val_accuracy: 0.7388
Epoch 15/200
10/10 [==============================] - 0s 38ms/step - loss: 0.5976 - accuracy: 0.7011 - val_loss: 0.5314 - val_accuracy: 0.7457
Epoch 16/200
10/10 [==============================] - 0s 40ms/step - loss: 0.6096 - accuracy: 0.7183 - val_loss: 0.5158 - val_accuracy: 0.7560
Epoch 17/200
10/10 [==============================] - 0s 38ms/step - loss: 0.6161 - accuracy: 0.7089 - val_loss: 0.5007 - val_accuracy: 0.7698
Epoch 18/200
10/10 [==============================] - 0s 35ms/step - loss: 0.5789 - accuracy: 0.7158 - val_loss: 0.4880 - val_accuracy: 0.7732
Epoch 19/200
10/10 [==============================] - 0s 37ms/step - loss: 0.6104 - accuracy: 0.7175 - val_loss: 0.4694 - val_accuracy: 0.7938
Epoch 20/200
10/10 [==============================] - 0s 37ms/step - loss: 0.5471 - accuracy: 0.7468 - val_loss: 0.4504 - val_accuracy: 0.8144
Epoch 21/200
10/10 [==============================] - 0s 37ms/step - loss: 0.5407 - accuracy: 0.7373 - val_loss: 0.4328 - val_accuracy: 0.8351
Epoch 22/200
10/10 [==============================] - 0s 41ms/step - loss: 0.5733 - accuracy: 0.7442 - val_loss: 0.4149 - val_accuracy: 0.8454
Epoch 23/200
10/10 [==============================] - 0s 37ms/step - loss: 0.5267 - accuracy: 0.7528 - val_loss: 0.3964 - val_accuracy: 0.8557
Epoch 24/200
10/10 [==============================] - 0s 36ms/step - loss: 0.5218 - accuracy: 0.7562 - val_loss: 0.3823 - val_accuracy: 0.8591
Epoch 25/200
10/10 [==============================] - 0s 39ms/step - loss: 0.4876 - accuracy: 0.7717 - val_loss: 0.3658 - val_accuracy: 0.8660
Epoch 26/200
10/10 [==============================] - 0s 41ms/step - loss: 0.4704 - accuracy: 0.7709 - val_loss: 0.3484 - val_accuracy: 0.8797
Epoch 27/200
10/10 [==============================] - 0s 39ms/step - loss: 0.4935 - accuracy: 0.7717 - val_loss: 0.3295 - val_accuracy: 0.8969
Epoch 28/200
10/10 [==============================] - 0s 39ms/step - loss: 0.4761 - accuracy: 0.7890 - val_loss: 0.3118 - val_accuracy: 0.8969
Epoch 29/200
10/10 [==============================] - 0s 38ms/step - loss: 0.4479 - accuracy: 0.7993 - val_loss: 0.2958 - val_accuracy: 0.8969
Epoch 30/200
10/10 [==============================] - 0s 37ms/step - loss: 0.4283 - accuracy: 0.7933 - val_loss: 0.2809 - val_accuracy: 0.9003
Epoch 31/200
10/10 [==============================] - 0s 36ms/step - loss: 0.4430 - accuracy: 0.7898 - val_loss: 0.2668 - val_accuracy: 0.9003
Epoch 32/200
10/10 [==============================] - 0s 38ms/step - loss: 0.4157 - accuracy: 0.8243 - val_loss: 0.2540 - val_accuracy: 0.9038
Epoch 33/200
10/10 [==============================] - 0s 39ms/step - loss: 0.4257 - accuracy: 0.8200 - val_loss: 0.2420 - val_accuracy: 0.9038
Epoch 34/200
10/10 [==============================] - 0s 37ms/step - loss: 0.4180 - accuracy: 0.8174 - val_loss: 0.2314 - val_accuracy: 0.9141
Epoch 35/200
10/10 [==============================] - 0s 36ms/step - loss: 0.3615 - accuracy: 0.8338 - val_loss: 0.2217 - val_accuracy: 0.9141
Epoch 36/200
10/10 [==============================] - 0s 37ms/step - loss: 0.3817 - accuracy: 0.8363 - val_loss: 0.2133 - val_accuracy: 0.9175
Epoch 37/200
10/10 [==============================] - 0s 39ms/step - loss: 0.3989 - accuracy: 0.8467 - val_loss: 0.2047 - val_accuracy: 0.9175
Epoch 38/200
10/10 [==============================] - 0s 38ms/step - loss: 0.3618 - accuracy: 0.8381 - val_loss: 0.1949 - val_accuracy: 0.9244
Epoch 39/200
10/10 [==============================] - 0s 40ms/step - loss: 0.3375 - accuracy: 0.8536 - val_loss: 0.1894 - val_accuracy: 0.9244
Epoch 40/200
10/10 [==============================] - 0s 40ms/step - loss: 0.3562 - accuracy: 0.8562 - val_loss: 0.1828 - val_accuracy: 0.9244
Epoch 41/200
10/10 [==============================] - 0s 42ms/step - loss: 0.3492 - accuracy: 0.8527 - val_loss: 0.1753 - val_accuracy: 0.9347
Epoch 42/200
10/10 [==============================] - 0s 37ms/step - loss: 0.3093 - accuracy: 0.8872 - val_loss: 0.1696 - val_accuracy: 0.9347
Epoch 43/200
10/10 [==============================] - 0s 39ms/step - loss: 0.2875 - accuracy: 0.8760 - val_loss: 0.1636 - val_accuracy: 0.9347
Epoch 44/200
10/10 [==============================] - 0s 43ms/step - loss: 0.2973 - accuracy: 0.8829 - val_loss: 0.1571 - val_accuracy: 0.9381
Epoch 45/200
10/10 [==============================] - 0s 35ms/step - loss: 0.2839 - accuracy: 0.8846 - val_loss: 0.1506 - val_accuracy: 0.9416
Epoch 46/200
10/10 [==============================] - 0s 39ms/step - loss: 0.3039 - accuracy: 0.8820 - val_loss: 0.1472 - val_accuracy: 0.9416
Epoch 47/200
10/10 [==============================] - 0s 37ms/step - loss: 0.3203 - accuracy: 0.8915 - val_loss: 0.1437 - val_accuracy: 0.9416
Epoch 48/200
10/10 [==============================] - 0s 38ms/step - loss: 0.2719 - accuracy: 0.8949 - val_loss: 0.1365 - val_accuracy: 0.9381
Epoch 49/200
10/10 [==============================] - 0s 37ms/step - loss: 0.2456 - accuracy: 0.9009 - val_loss: 0.1323 - val_accuracy: 0.9347
Epoch 50/200
10/10 [==============================] - 0s 39ms/step - loss: 0.2885 - accuracy: 0.8880 - val_loss: 0.1311 - val_accuracy: 0.9416
Epoch 51/200
10/10 [==============================] - 0s 37ms/step - loss: 0.2253 - accuracy: 0.9027 - val_loss: 0.1307 - val_accuracy: 0.9450
Epoch 52/200
10/10 [==============================] - 0s 39ms/step - loss: 0.2374 - accuracy: 0.9208 - val_loss: 0.1306 - val_accuracy: 0.9416
Epoch 53/200
10/10 [==============================] - 0s 37ms/step - loss: 0.2336 - accuracy: 0.9061 - val_loss: 0.1317 - val_accuracy: 0.9519
Epoch 54/200
10/10 [==============================] - 0s 39ms/step - loss: 0.2159 - accuracy: 0.9156 - val_loss: 0.1277 - val_accuracy: 0.9485
Epoch 55/200
10/10 [==============================] - 0s 37ms/step - loss: 0.2191 - accuracy: 0.9173 - val_loss: 0.1273 - val_accuracy: 0.9485
Epoch 56/200
10/10 [==============================] - 0s 40ms/step - loss: 0.2244 - accuracy: 0.9130 - val_loss: 0.1281 - val_accuracy: 0.9485
Epoch 57/200
10/10 [==============================] - 0s 40ms/step - loss: 0.2159 - accuracy: 0.9156 - val_loss: 0.1271 - val_accuracy: 0.9485
Epoch 58/200
10/10 [==============================] - 0s 36ms/step - loss: 0.2177 - accuracy: 0.9182 - val_loss: 0.1181 - val_accuracy: 0.9485
Epoch 59/200
10/10 [==============================] - 0s 37ms/step - loss: 0.2125 - accuracy: 0.9190 - val_loss: 0.1113 - val_accuracy: 0.9519
Epoch 60/200
10/10 [==============================] - 0s 38ms/step - loss: 0.1958 - accuracy: 0.9156 - val_loss: 0.0977 - val_accuracy: 0.9519
Epoch 61/200
10/10 [==============================] - 0s 35ms/step - loss: 0.1977 - accuracy: 0.9268 - val_loss: 0.0990 - val_accuracy: 0.9553
Epoch 62/200
10/10 [==============================] - 0s 41ms/step - loss: 0.2018 - accuracy: 0.9285 - val_loss: 0.1054 - val_accuracy: 0.9519
Epoch 63/200
10/10 [==============================] - 0s 39ms/step - loss: 0.1956 - accuracy: 0.9302 - val_loss: 0.1009 - val_accuracy: 0.9553
Epoch 64/200
10/10 [==============================] - 0s 38ms/step - loss: 0.1951 - accuracy: 0.9285 - val_loss: 0.0954 - val_accuracy: 0.9553
Epoch 65/200
10/10 [==============================] - 0s 36ms/step - loss: 0.1766 - accuracy: 0.9388 - val_loss: 0.0982 - val_accuracy: 0.9519
Epoch 66/200
10/10 [==============================] - 0s 40ms/step - loss: 0.1920 - accuracy: 0.9302 - val_loss: 0.0976 - val_accuracy: 0.9553
Epoch 67/200
10/10 [==============================] - 0s 36ms/step - loss: 0.1803 - accuracy: 0.9302 - val_loss: 0.1063 - val_accuracy: 0.9588
Epoch 68/200
10/10 [==============================] - 0s 41ms/step - loss: 0.1864 - accuracy: 0.9276 - val_loss: 0.1097 - val_accuracy: 0.9553
Epoch 69/200
10/10 [==============================] - 0s 41ms/step - loss: 0.1642 - accuracy: 0.9449 - val_loss: 0.0945 - val_accuracy: 0.9553
Epoch 70/200
10/10 [==============================] - 0s 35ms/step - loss: 0.1734 - accuracy: 0.9457 - val_loss: 0.0883 - val_accuracy: 0.9588
Epoch 71/200
10/10 [==============================] - 0s 37ms/step - loss: 0.1758 - accuracy: 0.9294 - val_loss: 0.0918 - val_accuracy: 0.9553
Epoch 72/200
10/10 [==============================] - 0s 35ms/step - loss: 0.1736 - accuracy: 0.9345 - val_loss: 0.1038 - val_accuracy: 0.9588
Epoch 73/200
10/10 [==============================] - 0s 36ms/step - loss: 0.1658 - accuracy: 0.9449 - val_loss: 0.0884 - val_accuracy: 0.9588
Epoch 74/200
10/10 [==============================] - 0s 36ms/step - loss: 0.1803 - accuracy: 0.9345 - val_loss: 0.0792 - val_accuracy: 0.9622
Epoch 75/200
10/10 [==============================] - 0s 40ms/step - loss: 0.1730 - accuracy: 0.9440 - val_loss: 0.0815 - val_accuracy: 0.9622
Epoch 76/200
10/10 [==============================] - 0s 39ms/step - loss: 0.1561 - accuracy: 0.9432 - val_loss: 0.0847 - val_accuracy: 0.9656
Epoch 77/200
10/10 [==============================] - 0s 38ms/step - loss: 0.1576 - accuracy: 0.9328 - val_loss: 0.0784 - val_accuracy: 0.9691
Epoch 78/200
10/10 [==============================] - 0s 38ms/step - loss: 0.1846 - accuracy: 0.9440 - val_loss: 0.0722 - val_accuracy: 0.9725
Epoch 79/200
10/10 [==============================] - 0s 40ms/step - loss: 0.1934 - accuracy: 0.9483 - val_loss: 0.0759 - val_accuracy: 0.9691
Epoch 80/200
10/10 [==============================] - 0s 38ms/step - loss: 0.1378 - accuracy: 0.9500 - val_loss: 0.0746 - val_accuracy: 0.9691
Epoch 81/200
10/10 [==============================] - 0s 40ms/step - loss: 0.1613 - accuracy: 0.9552 - val_loss: 0.0696 - val_accuracy: 0.9794
Epoch 82/200
10/10 [==============================] - 0s 38ms/step - loss: 0.1497 - accuracy: 0.9475 - val_loss: 0.0681 - val_accuracy: 0.9794
Epoch 83/200
10/10 [==============================] - 0s 35ms/step - loss: 0.1358 - accuracy: 0.9526 - val_loss: 0.0782 - val_accuracy: 0.9588
Epoch 84/200
10/10 [==============================] - 0s 37ms/step - loss: 0.1127 - accuracy: 0.9595 - val_loss: 0.0757 - val_accuracy: 0.9622
Epoch 85/200
10/10 [==============================] - 0s 38ms/step - loss: 0.1394 - accuracy: 0.9535 - val_loss: 0.0708 - val_accuracy: 0.9725
Epoch 86/200
10/10 [==============================] - 0s 39ms/step - loss: 0.1218 - accuracy: 0.9612 - val_loss: 0.0781 - val_accuracy: 0.9588
Epoch 87/200
10/10 [==============================] - 0s 40ms/step - loss: 0.1369 - accuracy: 0.9518 - val_loss: 0.0739 - val_accuracy: 0.9725
Epoch 88/200
10/10 [==============================] - 0s 39ms/step - loss: 0.1234 - accuracy: 0.9535 - val_loss: 0.0770 - val_accuracy: 0.9656
Epoch 89/200
10/10 [==============================] - 0s 39ms/step - loss: 0.1158 - accuracy: 0.9526 - val_loss: 0.0864 - val_accuracy: 0.9622
Epoch 90/200
10/10 [==============================] - 0s 39ms/step - loss: 0.1164 - accuracy: 0.9630 - val_loss: 0.0787 - val_accuracy: 0.9656
Epoch 91/200
10/10 [==============================] - 0s 42ms/step - loss: 0.1401 - accuracy: 0.9432 - val_loss: 0.0681 - val_accuracy: 0.9759
Epoch 92/200
10/10 [==============================] - 0s 38ms/step - loss: 0.1119 - accuracy: 0.9535 - val_loss: 0.0702 - val_accuracy: 0.9759
Epoch 93/200
10/10 [==============================] - 0s 45ms/step - loss: 0.1212 - accuracy: 0.9509 - val_loss: 0.0801 - val_accuracy: 0.9622
Epoch 94/200
10/10 [==============================] - 0s 38ms/step - loss: 0.1214 - accuracy: 0.9578 - val_loss: 0.0776 - val_accuracy: 0.9656
Epoch 95/200
10/10 [==============================] - 0s 37ms/step - loss: 0.1092 - accuracy: 0.9638 - val_loss: 0.0850 - val_accuracy: 0.9691
Epoch 96/200
10/10 [==============================] - 0s 44ms/step - loss: 0.1126 - accuracy: 0.9638 - val_loss: 0.0747 - val_accuracy: 0.9725
Epoch 97/200
10/10 [==============================] - 0s 47ms/step - loss: 0.1058 - accuracy: 0.9664 - val_loss: 0.0697 - val_accuracy: 0.9725
Epoch 98/200
10/10 [==============================] - 0s 39ms/step - loss: 0.1126 - accuracy: 0.9647 - val_loss: 0.0702 - val_accuracy: 0.9725
Epoch 99/200
10/10 [==============================] - 0s 38ms/step - loss: 0.1242 - accuracy: 0.9612 - val_loss: 0.0688 - val_accuracy: 0.9725
Epoch 100/200
10/10 [==============================] - 0s 36ms/step - loss: 0.1080 - accuracy: 0.9612 - val_loss: 0.0608 - val_accuracy: 0.9794
Epoch 101/200
10/10 [==============================] - 0s 40ms/step - loss: 0.0926 - accuracy: 0.9647 - val_loss: 0.0615 - val_accuracy: 0.9794
Epoch 102/200
10/10 [==============================] - 0s 37ms/step - loss: 0.0948 - accuracy: 0.9630 - val_loss: 0.0590 - val_accuracy: 0.9794
Epoch 103/200
10/10 [==============================] - 0s 38ms/step - loss: 0.0842 - accuracy: 0.9690 - val_loss: 0.0650 - val_accuracy: 0.9759
Epoch 104/200
10/10 [==============================] - 0s 38ms/step - loss: 0.1045 - accuracy: 0.9664 - val_loss: 0.0675 - val_accuracy: 0.9759
Epoch 105/200
10/10 [==============================] - 0s 37ms/step - loss: 0.1026 - accuracy: 0.9707 - val_loss: 0.0700 - val_accuracy: 0.9759
Epoch 106/200
10/10 [==============================] - 0s 39ms/step - loss: 0.0784 - accuracy: 0.9716 - val_loss: 0.0676 - val_accuracy: 0.9759
Epoch 107/200
10/10 [==============================] - 0s 36ms/step - loss: 0.0993 - accuracy: 0.9699 - val_loss: 0.0605 - val_accuracy: 0.9656
Epoch 108/200
10/10 [==============================] - 0s 37ms/step - loss: 0.1000 - accuracy: 0.9621 - val_loss: 0.0604 - val_accuracy: 0.9759
Epoch 109/200
10/10 [==============================] - 0s 41ms/step - loss: 0.0946 - accuracy: 0.9724 - val_loss: 0.0649 - val_accuracy: 0.9759
Epoch 110/200
10/10 [==============================] - 0s 42ms/step - loss: 0.1217 - accuracy: 0.9664 - val_loss: 0.0578 - val_accuracy: 0.9828
Epoch 111/200
10/10 [==============================] - 0s 36ms/step - loss: 0.1037 - accuracy: 0.9569 - val_loss: 0.0519 - val_accuracy: 0.9828
Epoch 112/200
10/10 [==============================] - 0s 39ms/step - loss: 0.1083 - accuracy: 0.9543 - val_loss: 0.0515 - val_accuracy: 0.9828
Epoch 113/200
10/10 [==============================] - 0s 42ms/step - loss: 0.0969 - accuracy: 0.9707 - val_loss: 0.0491 - val_accuracy: 0.9828
Epoch 114/200
10/10 [==============================] - 0s 39ms/step - loss: 0.0872 - accuracy: 0.9724 - val_loss: 0.0535 - val_accuracy: 0.9828
Epoch 115/200
10/10 [==============================] - 0s 37ms/step - loss: 0.0784 - accuracy: 0.9750 - val_loss: 0.0555 - val_accuracy: 0.9828
Epoch 116/200
10/10 [==============================] - 0s 41ms/step - loss: 0.1156 - accuracy: 0.9578 - val_loss: 0.0520 - val_accuracy: 0.9863
Epoch 117/200
10/10 [==============================] - 0s 37ms/step - loss: 0.1022 - accuracy: 0.9621 - val_loss: 0.0518 - val_accuracy: 0.9759
Epoch 118/200
10/10 [==============================] - 0s 38ms/step - loss: 0.1014 - accuracy: 0.9647 - val_loss: 0.0542 - val_accuracy: 0.9759
Epoch 119/200
10/10 [==============================] - 0s 36ms/step - loss: 0.0894 - accuracy: 0.9690 - val_loss: 0.0562 - val_accuracy: 0.9759
Epoch 120/200
10/10 [==============================] - 0s 36ms/step - loss: 0.0815 - accuracy: 0.9681 - val_loss: 0.0506 - val_accuracy: 0.9828
Epoch 121/200
10/10 [==============================] - 0s 36ms/step - loss: 0.0768 - accuracy: 0.9716 - val_loss: 0.0544 - val_accuracy: 0.9794
Epoch 122/200
10/10 [==============================] - 0s 37ms/step - loss: 0.0782 - accuracy: 0.9716 - val_loss: 0.0554 - val_accuracy: 0.9794
Epoch 123/200
10/10 [==============================] - 0s 37ms/step - loss: 0.0724 - accuracy: 0.9759 - val_loss: 0.0537 - val_accuracy: 0.9828
Epoch 124/200
10/10 [==============================] - 0s 34ms/step - loss: 0.0893 - accuracy: 0.9630 - val_loss: 0.0582 - val_accuracy: 0.9759
Epoch 125/200
10/10 [==============================] - 0s 36ms/step - loss: 0.0769 - accuracy: 0.9802 - val_loss: 0.0646 - val_accuracy: 0.9759
Epoch 126/200
10/10 [==============================] - 0s 40ms/step - loss: 0.0822 - accuracy: 0.9681 - val_loss: 0.0584 - val_accuracy: 0.9794
Epoch 127/200
10/10 [==============================] - 0s 38ms/step - loss: 0.0824 - accuracy: 0.9664 - val_loss: 0.0555 - val_accuracy: 0.9794
Epoch 128/200
10/10 [==============================] - 0s 38ms/step - loss: 0.0857 - accuracy: 0.9793 - val_loss: 0.0520 - val_accuracy: 0.9828
Epoch 129/200
10/10 [==============================] - 0s 36ms/step - loss: 0.0828 - accuracy: 0.9802 - val_loss: 0.0547 - val_accuracy: 0.9863
Epoch 130/200
10/10 [==============================] - 0s 36ms/step - loss: 0.0730 - accuracy: 0.9716 - val_loss: 0.0562 - val_accuracy: 0.9794
Epoch 131/200
10/10 [==============================] - 0s 38ms/step - loss: 0.0662 - accuracy: 0.9716 - val_loss: 0.0520 - val_accuracy: 0.9828
Epoch 132/200
10/10 [==============================] - 0s 35ms/step - loss: 0.0744 - accuracy: 0.9707 - val_loss: 0.0538 - val_accuracy: 0.9828
Epoch 133/200
10/10 [==============================] - 0s 38ms/step - loss: 0.0688 - accuracy: 0.9759 - val_loss: 0.0573 - val_accuracy: 0.9794
Epoch 134/200
10/10 [==============================] - 0s 37ms/step - loss: 0.0665 - accuracy: 0.9742 - val_loss: 0.0530 - val_accuracy: 0.9828
Epoch 135/200
10/10 [==============================] - 0s 36ms/step - loss: 0.0620 - accuracy: 0.9836 - val_loss: 0.0641 - val_accuracy: 0.9759
Epoch 136/200
10/10 [==============================] - 0s 37ms/step - loss: 0.0734 - accuracy: 0.9750 - val_loss: 0.0501 - val_accuracy: 0.9828
Epoch 137/200
10/10 [==============================] - 0s 40ms/step - loss: 0.0730 - accuracy: 0.9716 - val_loss: 0.0481 - val_accuracy: 0.9863
Epoch 138/200
10/10 [==============================] - 0s 41ms/step - loss: 0.0646 - accuracy: 0.9793 - val_loss: 0.0496 - val_accuracy: 0.9828
Epoch 139/200
10/10 [==============================] - 0s 36ms/step - loss: 0.0677 - accuracy: 0.9742 - val_loss: 0.0515 - val_accuracy: 0.9794
Epoch 140/200
10/10 [==============================] - 0s 38ms/step - loss: 0.0654 - accuracy: 0.9759 - val_loss: 0.0658 - val_accuracy: 0.9794
Epoch 141/200
10/10 [==============================] - 0s 39ms/step - loss: 0.0872 - accuracy: 0.9742 - val_loss: 0.0681 - val_accuracy: 0.9759
Epoch 142/200
10/10 [==============================] - 0s 37ms/step - loss: 0.0583 - accuracy: 0.9785 - val_loss: 0.0584 - val_accuracy: 0.9794
Epoch 143/200
10/10 [==============================] - 0s 34ms/step - loss: 0.0656 - accuracy: 0.9767 - val_loss: 0.0544 - val_accuracy: 0.9828
Epoch 144/200
10/10 [==============================] - 0s 36ms/step - loss: 0.0616 - accuracy: 0.9811 - val_loss: 0.0565 - val_accuracy: 0.9828
Epoch 145/200
10/10 [==============================] - 0s 36ms/step - loss: 0.0469 - accuracy: 0.9879 - val_loss: 0.0561 - val_accuracy: 0.9828
Epoch 146/200
10/10 [==============================] - 0s 36ms/step - loss: 0.0519 - accuracy: 0.9811 - val_loss: 0.0572 - val_accuracy: 0.9828
Epoch 147/200
10/10 [==============================] - 0s 36ms/step - loss: 0.0559 - accuracy: 0.9750 - val_loss: 0.0623 - val_accuracy: 0.9759
Epoch 148/200
10/10 [==============================] - 0s 36ms/step - loss: 0.0573 - accuracy: 0.9854 - val_loss: 0.0510 - val_accuracy: 0.9794
Epoch 149/200
10/10 [==============================] - 0s 37ms/step - loss: 0.0873 - accuracy: 0.9673 - val_loss: 0.0498 - val_accuracy: 0.9759
Epoch 150/200
10/10 [==============================] - 0s 37ms/step - loss: 0.0592 - accuracy: 0.9828 - val_loss: 0.0490 - val_accuracy: 0.9828
Epoch 151/200
10/10 [==============================] - 0s 38ms/step - loss: 0.0837 - accuracy: 0.9776 - val_loss: 0.0515 - val_accuracy: 0.9828
Epoch 152/200
10/10 [==============================] - 0s 37ms/step - loss: 0.0797 - accuracy: 0.9690 - val_loss: 0.0416 - val_accuracy: 0.9828
Epoch 153/200
10/10 [==============================] - 0s 36ms/step - loss: 0.0587 - accuracy: 0.9776 - val_loss: 0.0491 - val_accuracy: 0.9828
Epoch 154/200
10/10 [==============================] - 0s 43ms/step - loss: 0.0601 - accuracy: 0.9776 - val_loss: 0.0674 - val_accuracy: 0.9725
Epoch 155/200
10/10 [==============================] - 0s 38ms/step - loss: 0.0373 - accuracy: 0.9836 - val_loss: 0.0703 - val_accuracy: 0.9725
Epoch 156/200
10/10 [==============================] - 0s 40ms/step - loss: 0.0571 - accuracy: 0.9811 - val_loss: 0.0613 - val_accuracy: 0.9759
Epoch 157/200
10/10 [==============================] - 0s 39ms/step - loss: 0.0390 - accuracy: 0.9862 - val_loss: 0.0561 - val_accuracy: 0.9759
Epoch 158/200
10/10 [==============================] - 0s 40ms/step - loss: 0.0568 - accuracy: 0.9767 - val_loss: 0.0825 - val_accuracy: 0.9725
Epoch 159/200
10/10 [==============================] - 0s 41ms/step - loss: 0.0596 - accuracy: 0.9785 - val_loss: 0.0752 - val_accuracy: 0.9725
Epoch 160/200
10/10 [==============================] - 0s 38ms/step - loss: 0.0511 - accuracy: 0.9819 - val_loss: 0.0670 - val_accuracy: 0.9725
Epoch 161/200
10/10 [==============================] - 0s 37ms/step - loss: 0.0504 - accuracy: 0.9793 - val_loss: 0.0811 - val_accuracy: 0.9725
Epoch 162/200
10/10 [==============================] - 0s 41ms/step - loss: 0.0435 - accuracy: 0.9811 - val_loss: 0.0911 - val_accuracy: 0.9725
Epoch 163/200
10/10 [==============================] - 0s 38ms/step - loss: 0.0333 - accuracy: 0.9862 - val_loss: 0.0803 - val_accuracy: 0.9759
Epoch 164/200
10/10 [==============================] - 0s 39ms/step - loss: 0.0337 - accuracy: 0.9888 - val_loss: 0.0581 - val_accuracy: 0.9794
Epoch 165/200
10/10 [==============================] - 0s 37ms/step - loss: 0.0490 - accuracy: 0.9836 - val_loss: 0.0548 - val_accuracy: 0.9828
Epoch 166/200
10/10 [==============================] - 0s 38ms/step - loss: 0.0500 - accuracy: 0.9811 - val_loss: 0.0523 - val_accuracy: 0.9794
Epoch 167/200
10/10 [==============================] - 0s 40ms/step - loss: 0.0614 - accuracy: 0.9742 - val_loss: 0.0737 - val_accuracy: 0.9759
Epoch 168/200
10/10 [==============================] - 0s 38ms/step - loss: 0.0448 - accuracy: 0.9871 - val_loss: 0.0839 - val_accuracy: 0.9725
Epoch 169/200
10/10 [==============================] - 0s 39ms/step - loss: 0.0495 - accuracy: 0.9802 - val_loss: 0.0694 - val_accuracy: 0.9759
Epoch 170/200
10/10 [==============================] - 0s 37ms/step - loss: 0.0453 - accuracy: 0.9879 - val_loss: 0.0510 - val_accuracy: 0.9759
Epoch 171/200
10/10 [==============================] - 0s 39ms/step - loss: 0.0414 - accuracy: 0.9879 - val_loss: 0.0446 - val_accuracy: 0.9794
Epoch 172/200
10/10 [==============================] - 0s 35ms/step - loss: 0.0360 - accuracy: 0.9871 - val_loss: 0.0445 - val_accuracy: 0.9828
Epoch 173/200
10/10 [==============================] - 0s 38ms/step - loss: 0.0373 - accuracy: 0.9871 - val_loss: 0.0393 - val_accuracy: 0.9828
Epoch 174/200
10/10 [==============================] - 0s 37ms/step - loss: 0.0397 - accuracy: 0.9845 - val_loss: 0.0400 - val_accuracy: 0.9828
Epoch 175/200
10/10 [==============================] - 0s 37ms/step - loss: 0.0416 - accuracy: 0.9811 - val_loss: 0.0409 - val_accuracy: 0.9828
Epoch 176/200
10/10 [==============================] - 0s 39ms/step - loss: 0.0281 - accuracy: 0.9914 - val_loss: 0.0422 - val_accuracy: 0.9863
Epoch 177/200
10/10 [==============================] - 0s 36ms/step - loss: 0.0490 - accuracy: 0.9828 - val_loss: 0.0390 - val_accuracy: 0.9863
Epoch 178/200
10/10 [==============================] - 0s 39ms/step - loss: 0.0353 - accuracy: 0.9905 - val_loss: 0.0379 - val_accuracy: 0.9863
Epoch 179/200
10/10 [==============================] - 0s 38ms/step - loss: 0.0433 - accuracy: 0.9845 - val_loss: 0.0327 - val_accuracy: 0.9828
Epoch 180/200
10/10 [==============================] - 0s 37ms/step - loss: 0.0448 - accuracy: 0.9819 - val_loss: 0.0378 - val_accuracy: 0.9863
Epoch 181/200
10/10 [==============================] - 0s 38ms/step - loss: 0.0253 - accuracy: 0.9940 - val_loss: 0.0758 - val_accuracy: 0.9725
Epoch 182/200
10/10 [==============================] - 0s 40ms/step - loss: 0.0441 - accuracy: 0.9836 - val_loss: 0.0619 - val_accuracy: 0.9759
Epoch 183/200
10/10 [==============================] - 0s 38ms/step - loss: 0.0478 - accuracy: 0.9845 - val_loss: 0.0465 - val_accuracy: 0.9828
Epoch 184/200
10/10 [==============================] - 0s 39ms/step - loss: 0.0467 - accuracy: 0.9828 - val_loss: 0.0326 - val_accuracy: 0.9897
Epoch 185/200
10/10 [==============================] - 0s 41ms/step - loss: 0.0348 - accuracy: 0.9854 - val_loss: 0.0381 - val_accuracy: 0.9897
Epoch 186/200
10/10 [==============================] - 0s 37ms/step - loss: 0.0279 - accuracy: 0.9897 - val_loss: 0.0313 - val_accuracy: 0.9897
Epoch 187/200
10/10 [==============================] - 0s 40ms/step - loss: 0.0311 - accuracy: 0.9931 - val_loss: 0.0303 - val_accuracy: 0.9897
Epoch 188/200
10/10 [==============================] - 0s 35ms/step - loss: 0.0452 - accuracy: 0.9836 - val_loss: 0.0300 - val_accuracy: 0.9897
Epoch 189/200
10/10 [==============================] - 0s 41ms/step - loss: 0.0393 - accuracy: 0.9871 - val_loss: 0.0291 - val_accuracy: 0.9897
Epoch 190/200
10/10 [==============================] - 0s 42ms/step - loss: 0.0255 - accuracy: 0.9897 - val_loss: 0.0277 - val_accuracy: 0.9897
Epoch 191/200
10/10 [==============================] - 0s 40ms/step - loss: 0.0424 - accuracy: 0.9879 - val_loss: 0.0363 - val_accuracy: 0.9828
Epoch 192/200
10/10 [==============================] - 0s 37ms/step - loss: 0.0508 - accuracy: 0.9854 - val_loss: 0.0458 - val_accuracy: 0.9828
Epoch 193/200
10/10 [==============================] - 0s 39ms/step - loss: 0.0320 - accuracy: 0.9888 - val_loss: 0.0362 - val_accuracy: 0.9897
Epoch 194/200
10/10 [==============================] - 0s 36ms/step - loss: 0.0334 - accuracy: 0.9888 - val_loss: 0.0288 - val_accuracy: 0.9931
Epoch 195/200
10/10 [==============================] - 0s 37ms/step - loss: 0.0483 - accuracy: 0.9871 - val_loss: 0.0342 - val_accuracy: 0.9897
Epoch 196/200
10/10 [==============================] - 0s 42ms/step - loss: 0.0411 - accuracy: 0.9854 - val_loss: 0.0346 - val_accuracy: 0.9828
Epoch 197/200
10/10 [==============================] - 0s 40ms/step - loss: 0.0423 - accuracy: 0.9845 - val_loss: 0.0359 - val_accuracy: 0.9897
Epoch 198/200
10/10 [==============================] - 0s 37ms/step - loss: 0.0332 - accuracy: 0.9879 - val_loss: 0.0317 - val_accuracy: 0.9828
Epoch 199/200
10/10 [==============================] - 0s 43ms/step - loss: 0.0319 - accuracy: 0.9879 - val_loss: 0.0309 - val_accuracy: 0.9863
Epoch 200/200
10/10 [==============================] - 0s 46ms/step - loss: 0.0220 - accuracy: 0.9914 - val_loss: 0.0269 - val_accuracy: 0.9863

Plotting Loss and Accuracy against the Number of Epochs for the Train and Test Sets

In [15]:
figure = plt.figure(figsize=(10, 10))
plt.plot(hist.history['accuracy'], label='Train_accuracy')
plt.plot(hist.history['val_accuracy'], label='Test_accuracy')
plt.title('Model Accuracy')
plt.xlabel('Epochs')
plt.ylabel('Accuracy')
plt.legend(loc="upper left")
plt.show()

figure2 = plt.figure(figsize=(10, 10))
plt.plot(hist.history['loss'], label='Train_loss')
plt.plot(hist.history['val_loss'], label='Test_loss')
plt.title('Model Loss')
plt.xlabel('Epochs')
plt.ylabel('Loss')
plt.legend(loc="upper left")
plt.show()

Evaluating Model on Test Set

In [16]:
pred = model.evaluate(X_test, Y_test)
print(f'Test Set Accuracy: {pred[1]}')
print(f'Test Set Loss: {pred[0]}')
10/10 [==============================] - 0s 4ms/step - loss: 0.0269 - accuracy: 0.9863
Test Set Accuracy: 0.9862542748451233
Test Set Loss: 0.026876887306571007

Classification Report

In [17]:
ypred = model.predict(X_test)
ypred = np.argmax(ypred, axis=1)
Y_test_pred = np.argmax(Y_test, axis=1)
print(classification_report(Y_test_pred, ypred))
              precision    recall  f1-score   support

           0       0.98      0.99      0.99       137
           1       0.99      0.98      0.99       154

    accuracy                           0.99       291
   macro avg       0.99      0.99      0.99       291
weighted avg       0.99      0.99      0.99       291

Confusion Matrix

In [18]:
matrix = confusion_matrix(Y_test_pred, ypred)
df_cm = pd.DataFrame(matrix, index=[0, 1], columns=[0, 1])
figure = plt.figure(figsize=(5, 5))
sns.heatmap(df_cm, annot=True, fmt='d')
Out[18]:
<AxesSubplot:>

Saving the Model

In [19]:
model.save('Driver_Drowsiness_Detection.h5')
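
The saved HDF5 file can later be reloaded without redefining the architecture, for example (a minimal sketch):

from tensorflow.keras.models import load_model

# Reload the trained model from the saved .h5 file
reloaded_model = load_model('Driver_Drowsiness_Detection.h5')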

Testing Model on Images

Testing the classification performance on random images of open and closed eyes

In [20]:
labels = ['Closed', 'Open']
img_closed1 = cv2.imread('driver_drowsiness_detection/closed_eye.jpg')
img_closed2 = cv2.imread('driver_drowsiness_detection/closed_eye2.jpg')
img_open1 = cv2.imread('driver_drowsiness_detection/open_eye.jpg')
img_open2 = cv2.imread('driver_drowsiness_detection/open_eye2.jpg')

img_closed1 = cv2.resize(img_closed1, (32, 32))
img_closed2 = cv2.resize(img_closed2, (32, 32))
img_open1 = cv2.resize(img_open1, (32, 32))
img_open2 = cv2.resize(img_open2, (32, 32))

img_closed1 = np.array(img_closed1)
img_closed2 = np.array(img_closed2)
img_open1 = np.array(img_open1)
img_open2 = np.array(img_open2)

img_closed1 = np.expand_dims(img_closed1, axis=0)
img_closed2 = np.expand_dims(img_closed2, axis=0)
img_open1 = np.expand_dims(img_open1, axis=0)
img_open2 = np.expand_dims(img_open2, axis=0)
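
The same read / resize / expand_dims steps are repeated for each test image above. A small helper keeps this tidy (a sketch that reproduces the steps above, assuming the 32x32 input size used throughout; the function name is our own):

def preprocess_eye_image(path, size=(32, 32)):
    # Read an image from disk and prepare it as a single-sample batch for the model
    img = cv2.imread(path)
    img = cv2.resize(img, size)
    return np.expand_dims(img, axis=0)

# e.g. img_closed1 = preprocess_eye_image('driver_drowsiness_detection/closed_eye.jpg')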
In [21]:
ypred_closed1 = model.predict(img_closed1)
ypred_closed2 = model.predict(img_closed2)
ypred_open1 = model.predict(img_open1)
ypred_open2 = model.predict(img_open2)
In [22]:
figure = plt.figure(figsize=(2, 2))
img_closed1 = np.squeeze(img_closed1, axis=0)
plt.imshow(img_closed1)
plt.axis('off')
plt.title(f'Prediction by the model: {labels[np.argmax(ypred_closed1[0], axis=0)]}')
plt.show()
In [23]:
figure = plt.figure(figsize=(2, 2))
img_closed2 = np.squeeze(img_closed2, axis=0)
plt.imshow(img_closed2)
plt.axis('off')
plt.title(f'Prediction by the model: {labels[np.argmax(ypred_closed2[0], axis=0)]}')
plt.show()
In [24]:
figure = plt.figure(figsize=(2, 2))
img_open1 = np.squeeze(img_open1, axis=0)
plt.imshow(img_open1)
plt.axis('off')
plt.title(f'Prediction by the model: {labels[np.argmax(ypred_open1[0], axis=0)]}')
plt.show()
In [25]:
figure = plt.figure(figsize=(2, 2))
img_open2 = np.squeeze(img_open2, axis=0)
plt.imshow(img_open2)
plt.axis('off')
plt.title(f'Prediction by the model: {labels[np.argmax(ypred_open2[0], axis=0)]}')
plt.show()

Creating a pipeline for making predictions on full-face images

In [26]:
def full_face_detection_pipeline(input_image_path):
    # Haar cascades (only the eye cascade is used below) plus a dlib face detector
    # and landmark predictor for face alignment
    face_cascade = cv2.CascadeClassifier('driver_drowsiness_detection/haarcascade_frontalface_default.xml')
    eye_cascade = cv2.CascadeClassifier('driver_drowsiness_detection/haarcascade_eye.xml')
    detector = dlib.get_frontal_face_detector()
    predictor = dlib.shape_predictor('driver_drowsiness_detection/shape_predictor_68_face_landmarks.dat')
    fa = FaceAligner(predictor, desiredFaceWidth=256)
    test_image = cv2.imread(input_image_path)
    test_image = imutils.resize(test_image, width=800)
    test_image_gray = cv2.cvtColor(test_image, cv2.COLOR_BGR2GRAY)
    # Detect all faces in the image
    rects = detector(test_image_gray, 2)
    for rect in rects:
        (x, y, w, h) = rect_to_bb(rect)
        faceOrig = imutils.resize(test_image[y:y+h, x:x+w], width=256)
        # Align the detected face so the eyes sit on a horizontal line
        faceAligned = fa.align(test_image, test_image_gray, rect)
        faceAligned_gray = cv2.cvtColor(faceAligned, cv2.COLOR_BGR2GRAY)
        plt.imshow(faceAligned_gray)
        plt.axis('off')
        plt.title('Aligned Face')
        plt.show()
        # Locate the eyes within the aligned face and classify each one with the CNN
        eyes = eye_cascade.detectMultiScale(faceAligned_gray, 1.1, 4)
        predictions = []
        for (ex, ey, ew, eh) in eyes:
            eye = faceAligned[ey:ey+eh, ex:ex+ew]
#             cv2.rectangle(test_image, (x+ex, y+ey), (x+ex+ew, y+ey+eh), (0, 0, 255), 8)
            eye = cv2.resize(eye, (32, 32))
            eye = np.array(eye)
            eye = np.expand_dims(eye, axis=0)
            ypred = model.predict(eye)
            ypred = np.argmax(ypred[0], axis=0)
            predictions.append(ypred)
        # If every detected eye is classified as closed (label 0), mark the driver as sleeping
        if all(i==0 for i in predictions):
            cv2.rectangle(test_image, (x, y), (x+w, y+h), (0, 0, 255), 8)
            cv2.putText(test_image, 'Sleeping', (x, y-10), cv2.FONT_HERSHEY_SIMPLEX, 1.5, (0, 0, 255), 3)
        else:
            cv2.rectangle(test_image, (x, y), (x+w, y+h), (0, 255, 0), 8)
            cv2.putText(test_image, 'Not Sleeping', (x, y-10), cv2.FONT_HERSHEY_SIMPLEX, 1.5, (0, 255, 0), 3)
    # Save the annotated image and return its path
    output_path = 'driver_drowsiness_detection/test_image_prediction.jpg'
    cv2.imwrite(output_path, test_image)
    return output_path
In [27]:
figure = plt.figure(figsize=(5, 5))
predicted_image = cv2.imread(full_face_detection_pipeline('driver_drowsiness_detection/active_person.jpg'))
predicted_image = cv2.cvtColor(predicted_image, cv2.COLOR_BGR2RGB)
plt.imshow(predicted_image)
plt.axis('off')
plt.show()
In [28]:
figure = plt.figure(figsize=(5, 5))
predicted_image = cv2.imread(full_face_detection_pipeline('driver_drowsiness_detection/drowsy_person.jpg'))
predicted_image = cv2.cvtColor(predicted_image, cv2.COLOR_BGR2RGB)
plt.imshow(predicted_image)
plt.axis('off')
plt.show()
In [29]:
figure = plt.figure(figsize=(5, 5))
predicted_image = cv2.imread(full_face_detection_pipeline('driver_drowsiness_detection/sleepy-driver.jpg'))
predicted_image = cv2.cvtColor(predicted_image, cv2.COLOR_BGR2RGB)
plt.imshow(predicted_image)
plt.axis('off')
plt.show()
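
As noted at the top, you can also upload your own test images and run them through the same pipeline. A minimal sketch ('my_test_image.jpg' is a hypothetical filename for an image uploaded to the working directory):

# Run the full-face pipeline on your own uploaded image
result = cv2.imread(full_face_detection_pipeline('my_test_image.jpg'))
plt.imshow(cv2.cvtColor(result, cv2.COLOR_BGR2RGB))
plt.axis('off')
plt.show()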