NOTE: This Use Case is not purposed for resource constrained devices.
Marble Defect Classifier¶
Credit: AITS Cainvas Community
Photo by Beethowen Souza on Dribbble
- This application of detecting defects on marble is an example of how deep learning can be applied to industrial businesses such as limestone manufacturers. Fragile material experiences a lot of damage during processing, and the electricity consumed on already-wasted material is just extra waste.
- It will reduce in-process errors and add a middle layer to quality inspection.
- image dimension 256x256
- There are four classes of deformations.
In [1]:
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
from tensorflow import keras as ks
import os
import cv2
In [2]:
# Download the marble defect dataset archive from the Cainvas S3 bucket
!wget https://cainvas-static.s3.amazonaws.com/media/user_data/cainvas-admin/marble.zip
In [3]:
# Extract quietly (-q) and overwrite any existing files (-o)
!unzip -qo marble.zip
In [4]:
# Dataset roots: each contains one sub-directory per defect class
train_dir = "dataset/train/"
test_dir = "dataset/test/"
In [5]:
def get_image(path):
    """Load the image at *path*, print its shape, and display it.

    Fixes two defects in the original:
    - ``cv2.imread`` returns BGR channel order while ``plt.imshow``
      expects RGB, so the displayed colors were swapped.
    - ``cv2.imread`` returns ``None`` (instead of raising) for an
      unreadable path, which made ``img.shape`` fail with a confusing
      AttributeError.
    """
    img = cv2.imread(path)
    if img is None:
        raise FileNotFoundError(f"Could not read image: {path}")
    print(img.shape)
    plt.imshow(cv2.cvtColor(img, cv2.COLOR_BGR2RGB))
In [6]:
image = "dataset/test/crack/_0_0_20210531_17292_0.jpg"
In [7]:
get_image(image)
Create Image Data Generator¶
In [8]:
def datapreprocessing(main_dir, bsize, augment=True):
    """Create a batched image generator over a class-per-folder directory.

    Parameters:
        main_dir: root directory with one sub-folder per class.
        bsize: batch size for the generator.
        augment: when True (default, matching the original behavior),
            random augmentation is applied. Pass False for validation /
            test data so evaluation images are only rescaled — the
            original applied the full augmentation pipeline to the test
            directory as well, which distorts validation metrics.

    Returns:
        A Keras DirectoryIterator yielding (images, one-hot labels),
        images resized to 48x48 RGB and rescaled to [0, 1].
    """
    from tensorflow.keras.preprocessing.image import ImageDataGenerator
    if augment:
        gen = ImageDataGenerator(rescale=1.0 / 255,
                                 zoom_range=0.2,
                                 shear_range=0.1,
                                 horizontal_flip=True,
                                 vertical_flip=True,
                                 rotation_range=20,
                                 width_shift_range=0.2,
                                 height_shift_range=0.2,
                                 #validation_split=0.3,
                                 fill_mode='nearest',
                                 )
    else:
        gen = ImageDataGenerator(rescale=1.0 / 255)
    train_generator = gen.flow_from_directory(
        directory=main_dir,
        target_size=(48, 48),
        batch_size=bsize,
        color_mode="rgb",
        shuffle=True,
        subset="training",
        class_mode='categorical')
    return train_generator
In [9]:
# NOTE(review): the test directory goes through the same augmenting
# pipeline as training data here — consider un-augmented preprocessing
# for validation.
traingen = datapreprocessing(train_dir,20)
validgen = datapreprocessing(test_dir,20)
In [10]:
labelnames = traingen.class_indices
labelnames
Out[10]:
In [11]:
#Function that can build a dataframe on passing folderpath.
def getdata(folder_path):
    """Build a DataFrame of (absolute image path, label) pairs for every
    image under *folder_path*, using the class names from the
    module-level ``labelnames`` mapping (built from the generator's
    ``class_indices``).

    Rewritten because ``DataFrame.append`` in a loop is O(n^2) and was
    removed in pandas 2.0; rows are collected in a plain list and the
    frame is constructed once. ``os.path.join`` replaces manual "/"
    concatenation (the original produced double slashes since
    *folder_path* already ends with "/").
    """
    records = []
    for key in labelnames:  # keys are the class sub-folder names
        label_dir = os.path.join(folder_path, str(key))
        for image in os.listdir(label_dir):
            records.append({'image_abs_path': os.path.join(label_dir, image),
                            'image_labels': key})
    return pd.DataFrame(records, columns=['image_abs_path', 'image_labels'])
In [12]:
#Create Train Dataframe as repository of paths and labels.
valid = getdata(test_dir)
In [13]:
valid
Out[13]:
In [14]:
# Fetch n number of images from train data frame
def get_n_images(n, df, label):
    """Display up to *n* images from *df* carrying *label* in a 2-row grid.

    Fixes the original's ``m = n/2``: a float column count is rejected
    by ``plt.subplot`` in modern matplotlib, and odd *n* lost one grid
    slot. The column count is now an integer, rounded up.
    """
    import warnings
    warnings.filterwarnings('ignore')  # silence image-library chatter
    subset = df[df["image_labels"] == label]
    print(len(subset))
    cols = (n + 1) // 2  # integer columns for a 2-row grid; covers odd n
    plt.figure(figsize=(12, 6))
    for i, path in enumerate(subset['image_abs_path'][0:n]):
        plt.subplot(2, cols, i + 1)
        get_image(path)
    plt.tight_layout()
    plt.show()
In [15]:
def visualize_gen(train_generator):
    """Plot the first image of ten successive batches drawn from
    *train_generator* in a 2x5 grid (one batch is consumed per tile)."""
    plt.figure(figsize=(6, 3))
    for slot in range(10):
        plt.subplot(2, 5, slot + 1)
        batch_images, _ = next(iter(train_generator))
        sample = batch_images[0]
        plt.axis("off")
        # generator output is rescaled to [0, 1]; scale back for display
        plt.imshow((sample * 255).astype(np.uint8))
    plt.tight_layout()
    plt.show()
In [16]:
visualize_gen(traingen)
In [17]:
input_shape = traingen.image_shape
input_shape
Out[17]:
Build Model's Architecture¶
In [18]:
def imageclf2(input_shape, num_classes=4):
    """Build, summarize, and return a small CNN image classifier.

    Architecture: two Conv(8, 3x3) + MaxPool stages with dropout,
    flatten, a 128-unit dense layer, and a softmax output.

    Parameters:
        input_shape: (height, width, channels) of the input images.
        num_classes: size of the softmax output layer. Defaults to 4
            (the four marble defect classes), so existing callers are
            unaffected, but the model is now reusable for other label
            counts.

    Returns:
        The uncompiled ``keras.Sequential`` model.
    """
    from tensorflow import keras as ks
    model = ks.models.Sequential()
    model.add(ks.layers.Conv2D(8, (3, 3),
                               strides=1,
                               activation="relu",
                               padding='same',
                               name="layer1",
                               input_shape=input_shape))
    model.add(ks.layers.MaxPooling2D(pool_size=2, strides=2))
    model.add(ks.layers.Dropout(0.2))
    model.add(ks.layers.Conv2D(8, (3, 3), strides=1, padding="same",
                               activation="relu", name="layer2"))
    model.add(ks.layers.MaxPooling2D(pool_size=2, strides=2))
    model.add(ks.layers.Flatten())
    model.add(ks.layers.Dense(128, activation="relu", name="layer5"))
    model.add(ks.layers.Dropout(0.2))
    model.add(ks.layers.Dense(num_classes, activation="softmax",
                              name="output"))
    model.summary()
    return model
Build the Compiler¶
In [19]:
def compiler2(model,train_generator,valid_generator,epchs,bsize=32,lr=0.0001):
    """Compile *model*, fit it, and plot the accuracy curves.

    Parameters:
        model: an uncompiled Keras model.
        train_generator: generator yielding training batches.
        valid_generator: generator yielding validation batches.
        epchs: maximum number of epochs (early stopping may end sooner).
        bsize: unused in the active code — only referenced by the
            commented-out steps_per_epoch line; kept for compatibility.
        lr: Adam learning rate.

    Returns:
        (trained model, History object) tuple.
    """
    from tensorflow import keras as ks
    # Stop when val_loss has not improved for 10 epochs and restore the
    # best weights seen so far.
    callbck = ks.callbacks.EarlyStopping(monitor='val_loss',patience=10,
    verbose=2,
    restore_best_weights=True,)
    opt = ks.optimizers.Adam(learning_rate=lr)
    # One-hot labels from the generator -> categorical crossentropy.
    model.compile(loss="categorical_crossentropy",
    optimizer=opt,
    metrics=["accuracy"])
    history = model.fit(train_generator,
    epochs=epchs,
    callbacks=[callbck],
    validation_data=valid_generator,
    verbose = 1,
    #steps_per_epoch = train_generator.n // bsize
    )
    #Visualise curves
    plt.plot(history.history['accuracy'], label='train_acc')
    plt.plot(history.history['val_accuracy'], label='valid_acc')
    plt.title('lrate='+str(lr), pad=-50)
    plt.legend()
    plt.grid(True)
    return model,history
Fit Model and Evaluate¶
In [20]:
model01 = imageclf2(input_shape)
In [21]:
model_com01 = compiler2(model01,traingen,validgen,100)
In [22]:
#Visualise loss curves
history = model_com01[1]  # compiler2 returns (model, history)
plt.plot(history.history['loss'], label='loss')
plt.plot(history.history['val_loss'], label='val_loss')
plt.legend()
plt.grid()
plt.show()
Get Prediction and visualise the output.¶
In [23]:
def get_predictions(n):
    """Display validation image *n* from the first batch and return the
    predicted class index array.

    ``Sequential.predict_classes`` was removed in TensorFlow 2.6; the
    modern equivalent is ``np.argmax`` over ``predict`` output. The
    batch image is also fetched once instead of indexing the generator
    twice (each ``validgen[0]`` call re-reads the batch).
    """
    image1 = validgen[0][0][n]  # n-th image of the first validation batch
    plt.imshow(image1)
    input_arr = ks.preprocessing.image.img_to_array(image1)
    input_arr = np.array([input_arr])  # convert single image to a batch
    predictions = np.argmax(model_com01[0].predict(input_arr), axis=-1)
    return predictions
In [24]:
get_predictions(11)
Out[24]:
In [25]:
# Fetch n number of images from train data frame
def get_n_images(n, df, label):
    """Display up to *n* images from *df* carrying *label* in a 2-row grid.

    Fixes the original's ``m = n/2``: a float column count is rejected
    by ``plt.subplot`` in modern matplotlib, and odd *n* lost one grid
    slot. The column count is now an integer, rounded up.
    """
    import warnings
    warnings.filterwarnings('ignore')  # silence image-library chatter
    subset = df[df["image_labels"] == label]
    print(len(subset))
    cols = (n + 1) // 2  # integer columns for a 2-row grid; covers odd n
    plt.figure(figsize=(12, 6))
    for i, path in enumerate(subset['image_abs_path'][0:n]):
        plt.subplot(2, cols, i + 1)
        get_image(path)
    plt.tight_layout()
    plt.show()
In [26]:
#Visualise Predictions
get_n_images(6,valid,"good")
- Model has predicted that there is no deformation.
Save the model!¶
In [27]:
# save the model to disk
model = model_com01[0]
# TensorFlow SavedModel directory; consumed by the deepCC step below
model.save('saved_models/MarbleModel.tf')
DeepCC¶
In [28]:
!deepCC 'saved_models/MarbleModel.tf'
In [ ]: