In [1]:
import numpy as np, matplotlib.pyplot as plt
import os
from tensorflow import keras
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Conv2D, Dense, MaxPooling2D, Activation, Dropout, BatchNormalization, Flatten
from tensorflow.keras.preprocessing.image import ImageDataGenerator
from tensorflow.keras.utils import to_categorical
Input Data¶
In [2]:
train = ImageDataGenerator(rescale = 1/255)
test = ImageDataGenerator(rescale = 1/255)
train_dataset = train.flow_from_directory(directory='/home/jupyter-SiddharthGan/week5/seg_train/seg_train/',
                                           target_size=(50,50), shuffle=True)
test_dataset = test.flow_from_directory(directory='/home/jupyter-SiddharthGan/week5/seg_test/seg_test/',
                                         target_size=(50,50), shuffle=True)
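The generators yield batches of rescaled images paired with one-hot labels. A minimal sanity check of the batch shapes before training, assuming the directories above were found (the shapes shown reflect the generator defaults of batch_size=32 and class_mode='categorical'):

In [ ]:
# Peek at one batch: images are scaled to [0, 1], labels are one-hot over the 6 classes
images, labels = train_dataset[0]
print(images.shape)                 # e.g. (32, 50, 50, 3)
print(labels.shape)                 # e.g. (32, 6)
print(train_dataset.class_indices)  # class-name -> index mapping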
Data Visualization¶
In [3]:
# Show 10 random images from the first training batch with their one-hot labels
indices = [np.random.randint(32) for i in range(10)]
plt.figure(figsize=(20,8))
for plot_idx, img_idx in enumerate(indices):
    plt.subplot(2, 5, plot_idx + 1)
    plt.imshow(train_dataset[0][0][img_idx])
    plt.title(train_dataset[0][1][img_idx])
plt.show()
In [4]:
# Mapping each class index back to its class name
mappings = {index: name for name, index in train_dataset.class_indices.items()}
mappings
Out[4]:
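With the index-to-name mapping in hand, the visualization above can be repeated with readable class names instead of one-hot vectors as titles; a minimal sketch using the same first training batch:

In [ ]:
# Re-plot a few training images, titling each with its class name via `mappings`
images, labels = train_dataset[0]
plt.figure(figsize=(20,4))
for plot_idx in range(5):
    plt.subplot(1, 5, plot_idx + 1)
    plt.imshow(images[plot_idx])
    plt.title(mappings[np.argmax(labels[plot_idx])])
plt.show()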
Model Architecture¶
In [5]:
# Initializing the model
model = Sequential()

# Convolutional block 1: two 3x3 conv layers with 32 filters, then 2x2 max pooling
model.add(Conv2D(filters=32, kernel_size=(3,3), padding='same', input_shape=(50,50,3)))
model.add(Activation('relu'))
model.add(Conv2D(filters=32, kernel_size=(3,3)))
model.add(Dropout(0.25))
model.add(Activation('relu'))
model.add(MaxPooling2D(pool_size=(2,2)))

# Convolutional block 2: two 3x3 conv layers with 50 filters, then 2x2 max pooling
model.add(Conv2D(filters=50, kernel_size=(3,3)))
model.add(Activation('relu'))
model.add(Conv2D(filters=50, kernel_size=(3,3)))
model.add(Dropout(0.25))
model.add(Activation('relu'))
model.add(MaxPooling2D(pool_size=(2,2)))

# Classifier head: flatten, two dense layers (the first with L1/L2 regularization),
# dropout, and a softmax over the 6 classes
model.add(Flatten())
kernel_regularizer = keras.regularizers.l1_l2(l1=1e-5, l2=1e-4)
model.add(Dense(units=40, activation='relu', kernel_regularizer=kernel_regularizer))
model.add(Dense(50, activation='relu'))
model.add(Dropout(0.25))
model.add(Dense(6, activation='softmax'))
model.summary()
Training the model¶
In [6]:
# Compiling and training the model; the batch size is set by the generator
# (32 by default), so no batch_size argument is passed to fit()
model.compile(loss='categorical_crossentropy', optimizer='adam', metrics=['accuracy'])
history = model.fit(train_dataset, epochs=12, validation_data=test_dataset)
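Once training finishes, the test generator can be reused for a standalone end-of-run evaluation (it already serves as validation data, so this simply reports the final numbers on their own); a minimal sketch:

In [ ]:
# Final evaluation on the held-out test set
test_loss, test_acc = model.evaluate(test_dataset)
print(f"test accuracy: {test_acc:.3f}")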
Accuracy and Loss Graphs¶
In [7]:
plt.plot(history.history['accuracy'], label='accuracy')
plt.plot(history.history['val_accuracy'], label = 'val_accuracy')
plt.xlabel('Epoch')
plt.ylabel('Accuracy')
plt.legend(loc='lower right')
Out[7]:
In [8]:
plt.plot(history.history['loss'], label='loss')
plt.plot(history.history['val_loss'], label = 'val_loss')
plt.xlabel('Epoch')
plt.ylabel('Loss')
plt.legend(loc='lower right')
Out[8]:
Prediction¶
In [9]:
# Forming the prediction dataset: seg_pred/ holds a single unlabeled folder, so the
# generator's labels are placeholders; the large batch_size returns the whole folder at once
predict = ImageDataGenerator(rescale = 1/255)
predict_dataset = predict.flow_from_directory('/home/jupyter-SiddharthGan/week5/seg_pred/',
                                              target_size=(50,50), shuffle=True, batch_size=7301)
In [10]:
# Predicting the class of a single image from the prediction batch and displaying it
index = 25
image = predict_dataset[0][0][index]
tar = np.argmax(model.predict(image.reshape(-1,50,50,3)))
cls = mappings[tar]
plt.imshow(image)
plt.title(cls)
plt.show()
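The same idea extends to several images at once by predicting a whole slice of the batch; a minimal sketch, assuming `predict_dataset` and `mappings` from the cells above:

In [ ]:
# Predict class names for the first 8 prediction images and display them
images = predict_dataset[0][0][:8]
preds = np.argmax(model.predict(images), axis=1)
plt.figure(figsize=(16,8))
for plot_idx, (image, pred) in enumerate(zip(images, preds)):
    plt.subplot(2, 4, plot_idx + 1)
    plt.imshow(image)
    plt.title(mappings[pred])
plt.show()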
In [11]:
model.save('model.h5')
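The saved HDF5 file can be reloaded later for inference without retraining; a minimal sketch:

In [ ]:
# Reload the saved model and confirm its architecture matches the summary above
reloaded = keras.models.load_model('model.h5')
reloaded.summary()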
DeepCC¶
In [12]:
!deepCC model.h5
In [ ]: