NOTE: This Use Case is not purposed for resource constrained devices.
Traffic Sign Classification is an essential part of Autonomous Vehicles so that it can understand the traffic signs properly and take actions accordingly.
Import Dataset¶
In [1]:
# Download the dataset archive (wget -N skips the download if the local copy
# is already up to date), extract it quietly, then delete the archive.
# NOTE(review): this appears to create a "traffic" directory (used by the
# cells below), not a folder called "ALL" — confirm against the archive.
!wget -N "https://cainvas-static.s3.amazonaws.com/media/user_data/cainvas-admin/traffic.zip"
!unzip -qo traffic.zip
!rm traffic.zip
Importing Libraries¶
In [2]:
import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
import seaborn as sns
sns.set_style('whitegrid')
%matplotlib inline
import os
import time
from sklearn.model_selection import train_test_split
from sklearn.metrics import confusion_matrix, classification_report
from collections import Counter
import cv2
from tensorflow.keras.utils import plot_model
from keras.layers import Dense, Flatten, AveragePooling2D, Dropout
from keras.models import Model
from keras.applications.vgg16 import VGG16
from keras.preprocessing import image
from keras.preprocessing.image import ImageDataGenerator
from keras.optimizers import Adam
Reading Data of Class Labels¶
In [3]:
# Root of the extracted dataset; labels.csv maps ClassId -> sign name.
path = 'traffic'
lab = pd.read_csv(path + '/labels.csv')
In [4]:
# Build two lookup tables from the per-class folders under myData/:
#   d            : "<folder id> => <sign name>" -> number of images in folder
#   class_labels : numeric class id             -> human-readable sign name
d = {}
class_labels = {}
for class_dir in os.listdir(path + '/myData'):
    class_id = int(class_dir)
    # Look up the sign name once per folder instead of twice.
    sign_name = lab[lab.ClassId == class_id].values[0][1]
    image_count = len(os.listdir(path + '/myData/' + class_dir))
    d[class_dir + ' => ' + sign_name] = image_count
    class_labels[class_id] = sign_name
Reading Image Data¶
In [5]:
# input image dimensions (VGG16's expected input size)
img_rows, img_cols = 224, 224
# The images are RGB.
img_channels = 3
nb_classes = len(class_labels.keys())

# No augmentation or rescaling: pixel values stay in [0, 255].
datagen = ImageDataGenerator()
# target_size now uses the constants defined above instead of repeating
# the magic numbers (224, 224).
# NOTE(review): batch_size=2000 appears intended to pull the whole sample
# in one next() call below — confirm the dataset size fits.
data = datagen.flow_from_directory('traffic/myData',
                                   target_size=(img_rows, img_cols),
                                   batch_size=2000,
                                   class_mode='categorical',
                                   shuffle=True)
In [6]:
X , y = data.next()
In [7]:
# Labels are one hot encoded
print("Data Shape :{}\nLabels shape :{}".format(X.shape, y.shape))
Sample Images of Dataset¶
In [8]:
# Show a 10x10 grid of randomly sampled images with their class indices.
fig, axes = plt.subplots(10, 10, figsize=(18, 18))
for ax in axes.flat:
    idx = np.random.randint(X.shape[0])
    ax.imshow(X[idx].astype('uint8'))
    ax.grid(False)
    ax.axis('off')
    ax.set_title('Label: ' + str(np.argmax(y[idx])))
Test-Train Split¶
In [9]:
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.10, random_state=11)
In [10]:
print("Train Shape: {}\nTest Shape : {}".format(X_train.shape, X_test.shape))
Model Architecture¶
In [11]:
# Transfer learning: frozen VGG16 convolutional base (ImageNet architecture)
# with a small trainable classification head on top.
model = VGG16(input_shape=(224, 224, 3), include_top=False)

# Freeze the base so only the new head's weights are updated during training.
for layer in model.layers:
    layer.trainable = False

newModel = model.output
newModel = AveragePooling2D()(newModel)
newModel = Flatten()(newModel)
newModel = Dense(128, activation="relu")(newModel)
newModel = Dropout(0.5)(newModel)  # regularize the small head
# Size the output layer from the dataset (nb_classes, computed earlier)
# instead of the hard-coded 20 the original used.
newModel = Dense(nb_classes, activation='softmax')(newModel)
model = Model(inputs=model.input, outputs=newModel)
In [12]:
model.summary()
Compiling the Model¶
In [13]:
model.compile(optimizer='adam', loss='categorical_crossentropy', metrics=['accuracy'])
Model Training¶
In [14]:
# Train the head for a fixed number of epochs, validating on the
# held-out 10% split after every epoch.
n_epochs = 10
history = model.fit(
    X_train,
    y_train,
    validation_data=(X_test, y_test),
    batch_size=32,
    epochs=n_epochs,
    verbose=1,
)
Training Plot¶
In [15]:
# Persist the trained model, then plot the loss/accuracy curves
# recorded during training.
model.save("traffic_signal.h5")
ax = pd.DataFrame(history.history).plot(figsize=(8, 5))
ax.grid(True)
plt.show()
In [16]:
# Final metrics on the held-out split.
loss, acc = model.evaluate(X_test, y_test)
print(f'Accuracy:  {acc} \nLoss :  {loss}')
Obtaining Predictions¶
In [17]:
%%time
# Collapse the softmax probabilities to predicted class indices for the
# whole test set; %%time reports the cell's wall-clock cost.
pred = np.argmax(model.predict(X_test), axis = 1)
Classification Report¶
In [18]:
# Per-class precision/recall/F1. Derive the class count from the
# class_labels mapping instead of hard-coding 20, so the cell stays
# correct if the dataset's class count changes.
labels = [class_labels[i] for i in range(len(class_labels))]
print(classification_report(np.argmax(y_test, axis=1), pred, target_names=labels))
Assessing the performance of the Model¶
In [19]:
# Show 25 random test images with their true vs predicted class indices.
# Predict on the whole test set ONCE up front instead of issuing a separate
# model.predict call for every image inside the loop (the original did 25
# single-image forward passes).
grid_preds = np.argmax(model.predict(X_test), axis=1)
grid_truth = np.argmax(y_test, axis=1)
fig, axes = plt.subplots(5, 5, figsize=(18, 18))
for ax in axes.flat:
    r = np.random.randint(X_test.shape[0])
    ax.imshow(X_test[r].astype('uint8'))
    ax.grid(False)
    ax.axis('off')
    ax.set_title('Original: {} Predicted: {}'.format(grid_truth[r], grid_preds[r]))
Compiling Model with DeepC¶
In [20]:
!deepCC traffic_signal.h5