In [1]:
import numpy as np # linear algebra
import pandas as pd # data processing, CSV file I/O (e.g. pd.read_csv)
import matplotlib.pyplot as plt
import warnings
import os
import tensorflow as tf
warnings.filterwarnings('ignore')
import shutil
from random import shuffle
Unzipping Data¶
In [2]:
!wget https://cainvas-static.s3.amazonaws.com/media/user_data/SiddharthGan/archive_1.zip
!unzip -qo archive_1.zip
# zip folder is not needed anymore
!rm archive_1.zip
--2021-08-30 08:03:32-- https://cainvas-static.s3.amazonaws.com/media/user_data/SiddharthGan/archive_1.zip Resolving cainvas-static.s3.amazonaws.com (cainvas-static.s3.amazonaws.com)... 52.219.62.120 Connecting to cainvas-static.s3.amazonaws.com (cainvas-static.s3.amazonaws.com)|52.219.62.120|:443... connected. HTTP request sent, awaiting response... 200 OK Length: 411916 (402K) [application/zip] Saving to: ‘archive_1.zip’ archive_1.zip 100%[===================>] 402.26K --.-KB/s in 0.003s 2021-08-30 08:03:33 (120 MB/s) - ‘archive_1.zip’ saved [411916/411916]
In [3]:
shapes = ['triangles', 'circles', 'squares']
path= 'shapes/'
files= []
result= []
for shape in shapes:
new_path= path+shape
for file in os.listdir(new_path):
files.append(os.path.join(new_path,file))
result.append(shape)
In [4]:
len(files)
Out[4]:
300
In [5]:
len(result)
Out[5]:
300
In [6]:
files[:5]
Out[6]:
['shapes/triangles/drawing(3).png', 'shapes/triangles/drawing(16).png', 'shapes/triangles/drawing(71).png', 'shapes/triangles/drawing(68).png', 'shapes/triangles/drawing(75).png']
In [7]:
from skimage import color
images=[]
for file in files:
img= plt.imread(file)
img= color.rgb2gray(img)
img= img.ravel()
images.append(img)
In [8]:
len(images)
Out[8]:
300
Data Visualsation¶
In [9]:
df= pd.DataFrame(images)
In [10]:
df.head()
Out[10]:
0 | 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | ... | 774 | 775 | 776 | 777 | 778 | 779 | 780 | 781 | 782 | 783 | |
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
0 | 1.0 | 1.0 | 1.0 | 1.0 | 1.0 | 1.0 | 1.0 | 1.0 | 1.0 | 1.0 | ... | 1.0 | 1.0 | 1.0 | 1.0 | 1.0 | 1.0 | 1.0 | 1.0 | 1.0 | 1.0 |
1 | 1.0 | 1.0 | 1.0 | 1.0 | 1.0 | 1.0 | 1.0 | 1.0 | 1.0 | 1.0 | ... | 1.0 | 1.0 | 1.0 | 1.0 | 1.0 | 1.0 | 1.0 | 1.0 | 1.0 | 1.0 |
2 | 1.0 | 1.0 | 1.0 | 1.0 | 1.0 | 1.0 | 1.0 | 1.0 | 1.0 | 1.0 | ... | 1.0 | 1.0 | 1.0 | 1.0 | 1.0 | 1.0 | 1.0 | 1.0 | 1.0 | 1.0 |
3 | 1.0 | 1.0 | 1.0 | 1.0 | 1.0 | 1.0 | 1.0 | 1.0 | 1.0 | 1.0 | ... | 1.0 | 1.0 | 1.0 | 1.0 | 1.0 | 1.0 | 1.0 | 1.0 | 1.0 | 1.0 |
4 | 1.0 | 1.0 | 1.0 | 1.0 | 1.0 | 1.0 | 1.0 | 1.0 | 1.0 | 1.0 | ... | 1.0 | 1.0 | 1.0 | 1.0 | 1.0 | 1.0 | 1.0 | 1.0 | 1.0 | 1.0 |
5 rows × 784 columns
In [11]:
df['result']= result
df.head()
Out[11]:
0 | 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | ... | 775 | 776 | 777 | 778 | 779 | 780 | 781 | 782 | 783 | result | |
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
0 | 1.0 | 1.0 | 1.0 | 1.0 | 1.0 | 1.0 | 1.0 | 1.0 | 1.0 | 1.0 | ... | 1.0 | 1.0 | 1.0 | 1.0 | 1.0 | 1.0 | 1.0 | 1.0 | 1.0 | triangles |
1 | 1.0 | 1.0 | 1.0 | 1.0 | 1.0 | 1.0 | 1.0 | 1.0 | 1.0 | 1.0 | ... | 1.0 | 1.0 | 1.0 | 1.0 | 1.0 | 1.0 | 1.0 | 1.0 | 1.0 | triangles |
2 | 1.0 | 1.0 | 1.0 | 1.0 | 1.0 | 1.0 | 1.0 | 1.0 | 1.0 | 1.0 | ... | 1.0 | 1.0 | 1.0 | 1.0 | 1.0 | 1.0 | 1.0 | 1.0 | 1.0 | triangles |
3 | 1.0 | 1.0 | 1.0 | 1.0 | 1.0 | 1.0 | 1.0 | 1.0 | 1.0 | 1.0 | ... | 1.0 | 1.0 | 1.0 | 1.0 | 1.0 | 1.0 | 1.0 | 1.0 | 1.0 | triangles |
4 | 1.0 | 1.0 | 1.0 | 1.0 | 1.0 | 1.0 | 1.0 | 1.0 | 1.0 | 1.0 | ... | 1.0 | 1.0 | 1.0 | 1.0 | 1.0 | 1.0 | 1.0 | 1.0 | 1.0 | triangles |
5 rows × 785 columns
In [12]:
from sklearn.utils import shuffle
df= shuffle(df)
df.head()
Out[12]:
0 | 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | ... | 775 | 776 | 777 | 778 | 779 | 780 | 781 | 782 | 783 | result | |
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
149 | 1.0 | 1.0 | 1.0 | 1.0 | 1.0 | 1.0 | 1.0 | 1.0 | 1.0 | 1.0 | ... | 1.0 | 1.0 | 1.0 | 1.0 | 1.0 | 1.0 | 1.0 | 1.0 | 1.0 | circles |
234 | 1.0 | 1.0 | 1.0 | 1.0 | 1.0 | 1.0 | 1.0 | 1.0 | 1.0 | 1.0 | ... | 1.0 | 1.0 | 1.0 | 1.0 | 1.0 | 1.0 | 1.0 | 1.0 | 1.0 | squares |
195 | 1.0 | 1.0 | 1.0 | 1.0 | 1.0 | 1.0 | 1.0 | 1.0 | 1.0 | 1.0 | ... | 1.0 | 1.0 | 1.0 | 1.0 | 1.0 | 1.0 | 1.0 | 1.0 | 1.0 | circles |
34 | 1.0 | 1.0 | 1.0 | 1.0 | 1.0 | 1.0 | 1.0 | 1.0 | 1.0 | 1.0 | ... | 1.0 | 1.0 | 1.0 | 1.0 | 1.0 | 1.0 | 1.0 | 1.0 | 1.0 | triangles |
282 | 1.0 | 1.0 | 1.0 | 1.0 | 1.0 | 1.0 | 1.0 | 1.0 | 1.0 | 1.0 | ... | 1.0 | 1.0 | 1.0 | 1.0 | 1.0 | 1.0 | 1.0 | 1.0 | 1.0 | squares |
5 rows × 785 columns
In [13]:
df['result']= df['result'].replace({'circles':0, 'triangles':1, 'squares':2})
df.head()
Out[13]:
0 | 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | ... | 775 | 776 | 777 | 778 | 779 | 780 | 781 | 782 | 783 | result | |
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
149 | 1.0 | 1.0 | 1.0 | 1.0 | 1.0 | 1.0 | 1.0 | 1.0 | 1.0 | 1.0 | ... | 1.0 | 1.0 | 1.0 | 1.0 | 1.0 | 1.0 | 1.0 | 1.0 | 1.0 | 0 |
234 | 1.0 | 1.0 | 1.0 | 1.0 | 1.0 | 1.0 | 1.0 | 1.0 | 1.0 | 1.0 | ... | 1.0 | 1.0 | 1.0 | 1.0 | 1.0 | 1.0 | 1.0 | 1.0 | 1.0 | 2 |
195 | 1.0 | 1.0 | 1.0 | 1.0 | 1.0 | 1.0 | 1.0 | 1.0 | 1.0 | 1.0 | ... | 1.0 | 1.0 | 1.0 | 1.0 | 1.0 | 1.0 | 1.0 | 1.0 | 1.0 | 0 |
34 | 1.0 | 1.0 | 1.0 | 1.0 | 1.0 | 1.0 | 1.0 | 1.0 | 1.0 | 1.0 | ... | 1.0 | 1.0 | 1.0 | 1.0 | 1.0 | 1.0 | 1.0 | 1.0 | 1.0 | 1 |
282 | 1.0 | 1.0 | 1.0 | 1.0 | 1.0 | 1.0 | 1.0 | 1.0 | 1.0 | 1.0 | ... | 1.0 | 1.0 | 1.0 | 1.0 | 1.0 | 1.0 | 1.0 | 1.0 | 1.0 | 2 |
5 rows × 785 columns
Dropping useless column¶
In [14]:
y= df['result']
X= df.drop('result', axis=1)
In [15]:
X= X.values.reshape(-1,28,28,1)
Uniform Type Conversion¶
In [16]:
from tensorflow.keras.utils import to_categorical
y= to_categorical(y)
y.astype('int32')
Out[16]:
array([[1, 0, 0], [0, 0, 1], [1, 0, 0], [0, 1, 0], [0, 0, 1], [0, 0, 1], [0, 1, 0], [0, 1, 0], [0, 1, 0], [0, 1, 0], [0, 0, 1], [1, 0, 0], [1, 0, 0], [0, 0, 1], [1, 0, 0], [0, 0, 1], [1, 0, 0], [0, 1, 0], [1, 0, 0], [0, 1, 0], [1, 0, 0], [0, 1, 0], [0, 1, 0], [0, 0, 1], [1, 0, 0], [0, 0, 1], [0, 0, 1], [0, 1, 0], [1, 0, 0], [0, 0, 1], [0, 1, 0], [1, 0, 0], [1, 0, 0], [0, 0, 1], [0, 0, 1], [1, 0, 0], [1, 0, 0], [1, 0, 0], [0, 0, 1], [0, 1, 0], [0, 1, 0], [0, 1, 0], [0, 0, 1], [0, 1, 0], [0, 0, 1], [0, 1, 0], [0, 1, 0], [1, 0, 0], [1, 0, 0], [0, 0, 1], [0, 0, 1], [1, 0, 0], [0, 1, 0], [0, 1, 0], [1, 0, 0], [0, 1, 0], [1, 0, 0], [1, 0, 0], [0, 1, 0], [0, 1, 0], [0, 1, 0], [0, 0, 1], [1, 0, 0], [0, 1, 0], [1, 0, 0], [1, 0, 0], [0, 0, 1], [1, 0, 0], [1, 0, 0], [1, 0, 0], [0, 0, 1], [1, 0, 0], [0, 1, 0], [1, 0, 0], [0, 0, 1], [0, 1, 0], [0, 0, 1], [0, 1, 0], [0, 1, 0], [0, 0, 1], [0, 1, 0], [1, 0, 0], [1, 0, 0], [0, 0, 1], [0, 0, 1], [1, 0, 0], [1, 0, 0], [0, 1, 0], [1, 0, 0], [0, 0, 1], [1, 0, 0], [0, 0, 1], [0, 0, 1], [1, 0, 0], [0, 1, 0], [0, 1, 0], [1, 0, 0], [1, 0, 0], [1, 0, 0], [0, 1, 0], [0, 0, 1], [0, 0, 1], [0, 0, 1], [0, 1, 0], [1, 0, 0], [1, 0, 0], [1, 0, 0], [0, 0, 1], [0, 0, 1], [0, 0, 1], [0, 0, 1], [0, 0, 1], [1, 0, 0], [1, 0, 0], [0, 0, 1], [0, 0, 1], [0, 0, 1], [0, 1, 0], [0, 0, 1], [1, 0, 0], [1, 0, 0], [1, 0, 0], [1, 0, 0], [0, 0, 1], [1, 0, 0], [0, 0, 1], [0, 1, 0], [0, 0, 1], [1, 0, 0], [0, 0, 1], [0, 1, 0], [1, 0, 0], [1, 0, 0], [0, 0, 1], [0, 0, 1], [1, 0, 0], [0, 0, 1], [1, 0, 0], [0, 1, 0], [0, 0, 1], [0, 0, 1], [0, 1, 0], [1, 0, 0], [1, 0, 0], [0, 0, 1], [0, 1, 0], [0, 0, 1], [0, 0, 1], [1, 0, 0], [0, 0, 1], [1, 0, 0], [0, 1, 0], [0, 1, 0], [0, 0, 1], [0, 0, 1], [1, 0, 0], [0, 1, 0], [0, 0, 1], [0, 1, 0], [0, 1, 0], [0, 1, 0], [0, 0, 1], [1, 0, 0], [1, 0, 0], [0, 0, 1], [0, 0, 1], [0, 1, 0], [0, 0, 1], [0, 1, 0], [0, 1, 0], [0, 0, 1], [0, 0, 1], [0, 1, 0], [1, 0, 0], [1, 0, 0], [0, 0, 1], [0, 1, 0], [1, 0, 0], [0, 1, 0], [0, 1, 0], [0, 0, 1], [0, 0, 1], [0, 0, 1], [0, 1, 0], [0, 1, 0], [0, 1, 0], [1, 0, 0], [0, 1, 0], [1, 0, 0], [0, 0, 1], [0, 0, 1], [0, 0, 1], [1, 0, 0], [0, 1, 0], [0, 0, 1], [0, 1, 0], [0, 1, 0], [0, 1, 0], [0, 0, 1], [1, 0, 0], [0, 0, 1], [0, 1, 0], [0, 0, 1], [0, 1, 0], [1, 0, 0], [0, 1, 0], [0, 1, 0], [0, 0, 1], [0, 0, 1], [0, 1, 0], [0, 1, 0], [0, 0, 1], [0, 1, 0], [0, 1, 0], [0, 1, 0], [1, 0, 0], [0, 1, 0], [0, 1, 0], [0, 0, 1], [1, 0, 0], [1, 0, 0], [0, 1, 0], [0, 0, 1], [0, 0, 1], [0, 1, 0], [1, 0, 0], [1, 0, 0], [1, 0, 0], [0, 1, 0], [0, 1, 0], [1, 0, 0], [1, 0, 0], [1, 0, 0], [1, 0, 0], [1, 0, 0], [0, 1, 0], [0, 1, 0], [0, 1, 0], [0, 1, 0], [0, 0, 1], [0, 1, 0], [0, 1, 0], [0, 0, 1], [0, 0, 1], [1, 0, 0], [0, 0, 1], [0, 0, 1], [1, 0, 0], [1, 0, 0], [0, 1, 0], [1, 0, 0], [1, 0, 0], [0, 1, 0], [1, 0, 0], [0, 1, 0], [0, 1, 0], [1, 0, 0], [1, 0, 0], [0, 0, 1], [0, 0, 1], [0, 1, 0], [0, 0, 1], [0, 0, 1], [1, 0, 0], [0, 0, 1], [0, 1, 0], [1, 0, 0], [1, 0, 0], [0, 0, 1], [1, 0, 0], [0, 1, 0], [0, 0, 1], [1, 0, 0], [0, 1, 0], [0, 0, 1], [0, 0, 1], [0, 1, 0], [0, 1, 0], [0, 1, 0], [0, 0, 1], [1, 0, 0], [1, 0, 0], [0, 1, 0], [0, 0, 1], [0, 1, 0], [1, 0, 0], [0, 1, 0], [1, 0, 0], [1, 0, 0], [0, 0, 1], [0, 1, 0], [1, 0, 0], [0, 1, 0], [0, 0, 1], [0, 0, 1], [0, 1, 0], [0, 1, 0], [0, 0, 1], [1, 0, 0], [0, 0, 1]], dtype=int32)
Train Test Split¶
In [17]:
from sklearn.model_selection import train_test_split
X_train, X_test, y_train,y_test= train_test_split(X,y, test_size=0.2, random_state=42)
Model Architecture¶
In [18]:
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Dense, Flatten, Dropout, Conv2D, BatchNormalization, MaxPool2D
In [19]:
model= Sequential()
model.add(Conv2D(32, (3,3), activation='relu', padding='same', input_shape=(28,28,1)))
model.add(Conv2D(32, (3,3), activation='relu'))
model.add(MaxPool2D(2,2))
model.add(Dropout(0.25))
model.add(Conv2D(64, (3,3), activation='relu'))
model.add(MaxPool2D(2,2))
model.add(Flatten())
model.add(Dense(64, activation='relu'))
model.add(Dropout(0.25))
model.add(Dense(3, activation='softmax'))
In [20]:
model.summary()
Model: "sequential" _________________________________________________________________ Layer (type) Output Shape Param # ================================================================= conv2d (Conv2D) (None, 28, 28, 32) 320 _________________________________________________________________ conv2d_1 (Conv2D) (None, 26, 26, 32) 9248 _________________________________________________________________ max_pooling2d (MaxPooling2D) (None, 13, 13, 32) 0 _________________________________________________________________ dropout (Dropout) (None, 13, 13, 32) 0 _________________________________________________________________ conv2d_2 (Conv2D) (None, 11, 11, 64) 18496 _________________________________________________________________ max_pooling2d_1 (MaxPooling2 (None, 5, 5, 64) 0 _________________________________________________________________ flatten (Flatten) (None, 1600) 0 _________________________________________________________________ dense (Dense) (None, 64) 102464 _________________________________________________________________ dropout_1 (Dropout) (None, 64) 0 _________________________________________________________________ dense_1 (Dense) (None, 3) 195 ================================================================= Total params: 130,723 Trainable params: 130,723 Non-trainable params: 0 _________________________________________________________________
In [21]:
from tensorflow.keras.callbacks import ModelCheckpoint
model_save= ModelCheckpoint('best.h5', save_best_only=True, monitor='val_accuracy')
In [22]:
model.compile(optimizer='adam', loss='categorical_crossentropy', metrics=['accuracy'])
Training the model¶
In [23]:
history= model.fit(X_train, y_train, batch_size=20, epochs=100, verbose=1, validation_data=(X_test, y_test), callbacks=[model_save])
Epoch 1/100 12/12 [==============================] - 0s 17ms/step - loss: 1.1123 - accuracy: 0.3042 - val_loss: 1.0959 - val_accuracy: 0.3167 Epoch 2/100 12/12 [==============================] - 0s 5ms/step - loss: 1.0974 - accuracy: 0.3417 - val_loss: 1.0921 - val_accuracy: 0.4333 Epoch 3/100 12/12 [==============================] - 0s 4ms/step - loss: 1.0963 - accuracy: 0.3333 - val_loss: 1.0907 - val_accuracy: 0.3500 Epoch 4/100 12/12 [==============================] - 0s 5ms/step - loss: 1.0819 - accuracy: 0.4333 - val_loss: 1.0654 - val_accuracy: 0.5167 Epoch 5/100 12/12 [==============================] - 0s 4ms/step - loss: 1.0621 - accuracy: 0.4542 - val_loss: 1.0147 - val_accuracy: 0.4833 Epoch 6/100 12/12 [==============================] - 0s 6ms/step - loss: 0.9976 - accuracy: 0.5167 - val_loss: 0.9249 - val_accuracy: 0.5333 Epoch 7/100 12/12 [==============================] - 0s 6ms/step - loss: 0.9264 - accuracy: 0.5917 - val_loss: 0.8490 - val_accuracy: 0.5833 Epoch 8/100 12/12 [==============================] - 0s 5ms/step - loss: 0.8427 - accuracy: 0.5750 - val_loss: 0.7379 - val_accuracy: 0.7000 Epoch 9/100 12/12 [==============================] - 0s 5ms/step - loss: 0.7102 - accuracy: 0.7292 - val_loss: 0.6010 - val_accuracy: 0.8167 Epoch 10/100 12/12 [==============================] - 0s 4ms/step - loss: 0.5572 - accuracy: 0.7917 - val_loss: 0.4865 - val_accuracy: 0.8167 Epoch 11/100 12/12 [==============================] - 0s 5ms/step - loss: 0.5107 - accuracy: 0.8167 - val_loss: 0.4676 - val_accuracy: 0.8500 Epoch 12/100 12/12 [==============================] - 0s 4ms/step - loss: 0.3793 - accuracy: 0.8833 - val_loss: 0.4695 - val_accuracy: 0.7833 Epoch 13/100 12/12 [==============================] - 0s 4ms/step - loss: 0.3837 - accuracy: 0.8583 - val_loss: 0.3716 - val_accuracy: 0.8333 Epoch 14/100 12/12 [==============================] - 0s 4ms/step - loss: 0.3282 - accuracy: 0.8833 - val_loss: 0.3677 - val_accuracy: 0.8500 Epoch 15/100 12/12 [==============================] - 0s 5ms/step - loss: 0.2777 - accuracy: 0.9125 - val_loss: 0.3662 - val_accuracy: 0.8667 Epoch 16/100 12/12 [==============================] - 0s 4ms/step - loss: 0.2536 - accuracy: 0.9208 - val_loss: 0.3261 - val_accuracy: 0.8667 Epoch 17/100 12/12 [==============================] - 0s 3ms/step - loss: 0.2008 - accuracy: 0.9458 - val_loss: 0.3115 - val_accuracy: 0.8500 Epoch 18/100 12/12 [==============================] - 0s 6ms/step - loss: 0.1649 - accuracy: 0.9458 - val_loss: 0.3499 - val_accuracy: 0.8667 Epoch 19/100 12/12 [==============================] - 0s 5ms/step - loss: 0.1265 - accuracy: 0.9583 - val_loss: 0.3479 - val_accuracy: 0.8833 Epoch 20/100 12/12 [==============================] - 0s 4ms/step - loss: 0.1588 - accuracy: 0.9458 - val_loss: 0.3476 - val_accuracy: 0.8667 Epoch 21/100 12/12 [==============================] - 0s 3ms/step - loss: 0.1662 - accuracy: 0.9458 - val_loss: 0.3211 - val_accuracy: 0.8667 Epoch 22/100 12/12 [==============================] - 0s 3ms/step - loss: 0.1309 - accuracy: 0.9417 - val_loss: 0.3586 - val_accuracy: 0.8500 Epoch 23/100 12/12 [==============================] - 0s 3ms/step - loss: 0.0895 - accuracy: 0.9625 - val_loss: 0.3904 - val_accuracy: 0.8500 Epoch 24/100 12/12 [==============================] - 0s 3ms/step - loss: 0.1067 - accuracy: 0.9625 - val_loss: 0.5581 - val_accuracy: 0.8167 Epoch 25/100 12/12 [==============================] - 0s 6ms/step - loss: 0.1033 - accuracy: 0.9667 - val_loss: 0.3578 - val_accuracy: 0.9000 Epoch 26/100 12/12 [==============================] - 0s 4ms/step - loss: 0.0626 - accuracy: 0.9917 - val_loss: 0.3928 - val_accuracy: 0.8833 Epoch 27/100 12/12 [==============================] - 0s 3ms/step - loss: 0.0536 - accuracy: 0.9917 - val_loss: 0.3721 - val_accuracy: 0.9000 Epoch 28/100 12/12 [==============================] - 0s 4ms/step - loss: 0.0527 - accuracy: 0.9875 - val_loss: 0.4973 - val_accuracy: 0.8667 Epoch 29/100 12/12 [==============================] - 0s 3ms/step - loss: 0.0808 - accuracy: 0.9708 - val_loss: 0.4575 - val_accuracy: 0.8833 Epoch 30/100 12/12 [==============================] - 0s 3ms/step - loss: 0.1053 - accuracy: 0.9667 - val_loss: 0.4273 - val_accuracy: 0.8667 Epoch 31/100 12/12 [==============================] - 0s 5ms/step - loss: 0.0559 - accuracy: 0.9958 - val_loss: 0.4082 - val_accuracy: 0.9167 Epoch 32/100 12/12 [==============================] - 0s 3ms/step - loss: 0.0435 - accuracy: 0.9875 - val_loss: 0.5094 - val_accuracy: 0.8667 Epoch 33/100 12/12 [==============================] - 0s 3ms/step - loss: 0.0429 - accuracy: 0.9917 - val_loss: 0.4551 - val_accuracy: 0.9167 Epoch 34/100 12/12 [==============================] - 0s 3ms/step - loss: 0.0357 - accuracy: 0.9917 - val_loss: 0.5260 - val_accuracy: 0.8833 Epoch 35/100 12/12 [==============================] - 0s 3ms/step - loss: 0.0365 - accuracy: 0.9958 - val_loss: 0.4955 - val_accuracy: 0.9000 Epoch 36/100 12/12 [==============================] - 0s 3ms/step - loss: 0.0282 - accuracy: 1.0000 - val_loss: 0.3949 - val_accuracy: 0.9167 Epoch 37/100 12/12 [==============================] - 0s 3ms/step - loss: 0.0303 - accuracy: 0.9875 - val_loss: 0.4732 - val_accuracy: 0.9167 Epoch 38/100 12/12 [==============================] - 0s 3ms/step - loss: 0.0169 - accuracy: 1.0000 - val_loss: 0.4634 - val_accuracy: 0.9000 Epoch 39/100 12/12 [==============================] - 0s 3ms/step - loss: 0.0188 - accuracy: 0.9958 - val_loss: 0.4746 - val_accuracy: 0.9167 Epoch 40/100 12/12 [==============================] - 0s 3ms/step - loss: 0.0140 - accuracy: 0.9958 - val_loss: 0.5232 - val_accuracy: 0.8833 Epoch 41/100 12/12 [==============================] - 0s 3ms/step - loss: 0.0119 - accuracy: 0.9958 - val_loss: 0.4813 - val_accuracy: 0.9167 Epoch 42/100 12/12 [==============================] - 0s 3ms/step - loss: 0.0168 - accuracy: 0.9958 - val_loss: 0.4460 - val_accuracy: 0.9167 Epoch 43/100 12/12 [==============================] - 0s 3ms/step - loss: 0.0163 - accuracy: 0.9958 - val_loss: 0.4505 - val_accuracy: 0.9167 Epoch 44/100 12/12 [==============================] - 0s 3ms/step - loss: 0.0164 - accuracy: 0.9958 - val_loss: 0.4882 - val_accuracy: 0.9167 Epoch 45/100 12/12 [==============================] - 0s 3ms/step - loss: 0.0250 - accuracy: 0.9917 - val_loss: 0.5581 - val_accuracy: 0.9167 Epoch 46/100 12/12 [==============================] - 0s 3ms/step - loss: 0.0245 - accuracy: 0.9958 - val_loss: 0.4527 - val_accuracy: 0.9167 Epoch 47/100 12/12 [==============================] - 0s 3ms/step - loss: 0.0253 - accuracy: 0.9917 - val_loss: 0.4838 - val_accuracy: 0.9000 Epoch 48/100 12/12 [==============================] - 0s 3ms/step - loss: 0.0148 - accuracy: 0.9958 - val_loss: 0.4740 - val_accuracy: 0.9000 Epoch 49/100 12/12 [==============================] - 0s 3ms/step - loss: 0.0104 - accuracy: 1.0000 - val_loss: 0.4538 - val_accuracy: 0.9167 Epoch 50/100 12/12 [==============================] - 0s 3ms/step - loss: 0.0127 - accuracy: 0.9958 - val_loss: 0.5382 - val_accuracy: 0.9000 Epoch 51/100 12/12 [==============================] - 0s 3ms/step - loss: 0.0139 - accuracy: 0.9958 - val_loss: 0.4694 - val_accuracy: 0.9167 Epoch 52/100 12/12 [==============================] - 0s 3ms/step - loss: 0.0361 - accuracy: 0.9917 - val_loss: 0.4307 - val_accuracy: 0.9167 Epoch 53/100 12/12 [==============================] - 0s 3ms/step - loss: 0.0125 - accuracy: 0.9958 - val_loss: 0.5136 - val_accuracy: 0.9167 Epoch 54/100 12/12 [==============================] - 0s 3ms/step - loss: 0.0076 - accuracy: 1.0000 - val_loss: 0.4797 - val_accuracy: 0.9000 Epoch 55/100 12/12 [==============================] - 0s 3ms/step - loss: 0.0231 - accuracy: 0.9875 - val_loss: 0.5369 - val_accuracy: 0.9167 Epoch 56/100 12/12 [==============================] - 0s 3ms/step - loss: 0.0115 - accuracy: 1.0000 - val_loss: 0.5223 - val_accuracy: 0.9167 Epoch 57/100 12/12 [==============================] - 0s 3ms/step - loss: 0.0115 - accuracy: 0.9958 - val_loss: 0.6016 - val_accuracy: 0.9000 Epoch 58/100 12/12 [==============================] - 0s 3ms/step - loss: 0.0081 - accuracy: 1.0000 - val_loss: 0.7670 - val_accuracy: 0.8500 Epoch 59/100 12/12 [==============================] - 0s 3ms/step - loss: 0.0194 - accuracy: 0.9958 - val_loss: 0.5915 - val_accuracy: 0.9167 Epoch 60/100 12/12 [==============================] - 0s 3ms/step - loss: 0.0074 - accuracy: 1.0000 - val_loss: 0.5328 - val_accuracy: 0.9000 Epoch 61/100 12/12 [==============================] - 0s 3ms/step - loss: 0.0078 - accuracy: 1.0000 - val_loss: 0.5192 - val_accuracy: 0.9167 Epoch 62/100 12/12 [==============================] - 0s 3ms/step - loss: 0.0102 - accuracy: 0.9958 - val_loss: 0.5077 - val_accuracy: 0.9167 Epoch 63/100 12/12 [==============================] - 0s 3ms/step - loss: 0.0063 - accuracy: 1.0000 - val_loss: 0.5320 - val_accuracy: 0.9000 Epoch 64/100 12/12 [==============================] - 0s 3ms/step - loss: 0.0095 - accuracy: 0.9958 - val_loss: 0.6002 - val_accuracy: 0.8833 Epoch 65/100 12/12 [==============================] - 0s 3ms/step - loss: 0.0232 - accuracy: 0.9958 - val_loss: 1.0127 - val_accuracy: 0.8333 Epoch 66/100 12/12 [==============================] - 0s 3ms/step - loss: 0.0255 - accuracy: 0.9917 - val_loss: 0.5754 - val_accuracy: 0.9167 Epoch 67/100 12/12 [==============================] - 0s 3ms/step - loss: 0.0201 - accuracy: 0.9917 - val_loss: 0.5047 - val_accuracy: 0.8833 Epoch 68/100 12/12 [==============================] - 0s 3ms/step - loss: 0.0189 - accuracy: 0.9958 - val_loss: 0.6710 - val_accuracy: 0.8833 Epoch 69/100 12/12 [==============================] - 0s 3ms/step - loss: 0.0140 - accuracy: 1.0000 - val_loss: 0.5965 - val_accuracy: 0.9000 Epoch 70/100 12/12 [==============================] - 0s 3ms/step - loss: 0.0070 - accuracy: 1.0000 - val_loss: 0.5595 - val_accuracy: 0.9167 Epoch 71/100 12/12 [==============================] - 0s 3ms/step - loss: 0.0087 - accuracy: 1.0000 - val_loss: 0.5850 - val_accuracy: 0.8833 Epoch 72/100 12/12 [==============================] - 0s 3ms/step - loss: 0.0078 - accuracy: 1.0000 - val_loss: 0.5876 - val_accuracy: 0.9167 Epoch 73/100 12/12 [==============================] - 0s 3ms/step - loss: 0.0120 - accuracy: 0.9958 - val_loss: 0.6837 - val_accuracy: 0.9167 Epoch 74/100 12/12 [==============================] - 0s 3ms/step - loss: 0.0056 - accuracy: 1.0000 - val_loss: 0.6401 - val_accuracy: 0.9000 Epoch 75/100 12/12 [==============================] - 0s 3ms/step - loss: 0.0105 - accuracy: 0.9958 - val_loss: 0.6296 - val_accuracy: 0.9000 Epoch 76/100 12/12 [==============================] - 0s 3ms/step - loss: 0.0069 - accuracy: 1.0000 - val_loss: 0.5819 - val_accuracy: 0.9000 Epoch 77/100 12/12 [==============================] - 0s 3ms/step - loss: 0.0051 - accuracy: 1.0000 - val_loss: 0.5993 - val_accuracy: 0.9167 Epoch 78/100 12/12 [==============================] - 0s 3ms/step - loss: 0.0017 - accuracy: 1.0000 - val_loss: 0.6399 - val_accuracy: 0.9167 Epoch 79/100 12/12 [==============================] - 0s 3ms/step - loss: 0.0117 - accuracy: 0.9958 - val_loss: 0.7028 - val_accuracy: 0.9000 Epoch 80/100 12/12 [==============================] - 0s 3ms/step - loss: 0.0067 - accuracy: 1.0000 - val_loss: 0.6796 - val_accuracy: 0.8833 Epoch 81/100 12/12 [==============================] - 0s 3ms/step - loss: 0.0148 - accuracy: 0.9958 - val_loss: 0.8375 - val_accuracy: 0.8667 Epoch 82/100 12/12 [==============================] - 0s 3ms/step - loss: 0.0425 - accuracy: 0.9833 - val_loss: 1.1823 - val_accuracy: 0.8167 Epoch 83/100 12/12 [==============================] - 0s 3ms/step - loss: 0.0220 - accuracy: 0.9958 - val_loss: 0.7019 - val_accuracy: 0.9000 Epoch 84/100 12/12 [==============================] - 0s 3ms/step - loss: 0.0039 - accuracy: 1.0000 - val_loss: 0.8417 - val_accuracy: 0.8667 Epoch 85/100 12/12 [==============================] - 0s 3ms/step - loss: 0.0077 - accuracy: 1.0000 - val_loss: 0.7788 - val_accuracy: 0.8500 Epoch 86/100 12/12 [==============================] - 0s 3ms/step - loss: 0.0054 - accuracy: 1.0000 - val_loss: 0.7818 - val_accuracy: 0.9000 Epoch 87/100 12/12 [==============================] - 0s 3ms/step - loss: 0.0123 - accuracy: 0.9958 - val_loss: 0.7559 - val_accuracy: 0.9000 Epoch 88/100 12/12 [==============================] - 0s 3ms/step - loss: 0.0090 - accuracy: 0.9958 - val_loss: 0.7779 - val_accuracy: 0.9000 Epoch 89/100 12/12 [==============================] - 0s 3ms/step - loss: 0.0159 - accuracy: 0.9958 - val_loss: 0.9408 - val_accuracy: 0.8500 Epoch 90/100 12/12 [==============================] - 0s 3ms/step - loss: 0.0065 - accuracy: 1.0000 - val_loss: 0.7213 - val_accuracy: 0.9167 Epoch 91/100 12/12 [==============================] - 0s 3ms/step - loss: 0.0096 - accuracy: 1.0000 - val_loss: 0.6934 - val_accuracy: 0.9000 Epoch 92/100 12/12 [==============================] - 0s 3ms/step - loss: 0.0045 - accuracy: 1.0000 - val_loss: 0.7442 - val_accuracy: 0.9167 Epoch 93/100 12/12 [==============================] - 0s 3ms/step - loss: 0.0154 - accuracy: 0.9958 - val_loss: 0.7858 - val_accuracy: 0.9167 Epoch 94/100 12/12 [==============================] - 0s 3ms/step - loss: 0.0046 - accuracy: 1.0000 - val_loss: 0.8481 - val_accuracy: 0.9000 Epoch 95/100 12/12 [==============================] - 0s 3ms/step - loss: 0.0040 - accuracy: 1.0000 - val_loss: 0.7743 - val_accuracy: 0.9167 Epoch 96/100 12/12 [==============================] - 0s 3ms/step - loss: 0.0138 - accuracy: 0.9958 - val_loss: 0.7438 - val_accuracy: 0.9000 Epoch 97/100 12/12 [==============================] - 0s 3ms/step - loss: 0.0088 - accuracy: 1.0000 - val_loss: 0.7356 - val_accuracy: 0.8833 Epoch 98/100 12/12 [==============================] - 0s 5ms/step - loss: 0.0086 - accuracy: 0.9958 - val_loss: 0.8468 - val_accuracy: 0.9333 Epoch 99/100 12/12 [==============================] - 0s 3ms/step - loss: 0.0112 - accuracy: 0.9917 - val_loss: 0.8711 - val_accuracy: 0.9333 Epoch 100/100 12/12 [==============================] - 0s 3ms/step - loss: 0.0091 - accuracy: 0.9958 - val_loss: 0.8142 - val_accuracy: 0.9167
Result and Prediction¶
In [24]:
model.load_weights('best.hdf5')
model.evaluate(X_test,y_test, batch_size=20)
3/3 [==============================] - 0s 1ms/step - loss: 0.2314 - accuracy: 0.9833
Out[24]:
[0.23137156665325165, 0.9833333492279053]
In [25]:
pred= model.predict(X_test)
predictions= np.argmax(pred, axis=1)
predictions
Out[25]:
array([1, 0, 1, 1, 0, 0, 1, 2, 2, 2, 1, 0, 2, 1, 2, 1, 2, 2, 0, 2, 1, 0, 1, 0, 0, 2, 1, 1, 2, 1, 1, 2, 2, 0, 0, 0, 1, 0, 1, 0, 0, 2, 1, 1, 1, 1, 0, 1, 2, 0, 2, 2, 2, 1, 2, 1, 0, 2, 0, 2])
Accuracy and Loss Graph¶
In [26]:
plt.plot(history.history['accuracy'])
plt.plot(history.history['val_accuracy'])
Out[26]:
[<matplotlib.lines.Line2D at 0x7f22400f99b0>]
In [27]:
plt.plot(history.history['loss'])
plt.plot(history.history['val_loss'])
Out[27]:
[<matplotlib.lines.Line2D at 0x7f225abd79e8>]
Testing¶
In [28]:
plt.imshow(X_test[2])
Out[28]:
<matplotlib.image.AxesImage at 0x7f21d07e2668>
In [29]:
y_train[2]
Out[29]:
array([0., 1., 0.], dtype=float32)
In [30]:
predictions[2]
Out[30]:
1
DeepCC¶
In [31]:
!deepCC best.h5
[INFO] Reading [keras model] 'best.h5' [SUCCESS] Saved 'best_deepC/best.onnx' [INFO] Reading [onnx model] 'best_deepC/best.onnx' [INFO] Model info: ir_vesion : 5 doc : [WARNING] [ONNX]: graph-node conv2d's attribute auto_pad has no meaningful data. [WARNING] [ONNX]: terminal (input/output) conv2d_input's shape is less than 1. Changing it to 1. [WARNING] [ONNX]: terminal (input/output) dense_1's shape is less than 1. Changing it to 1. WARN (GRAPH): found operator node with the same name (dense_1) as io node. [INFO] Running DNNC graph sanity check ... [SUCCESS] Passed sanity check. [INFO] Writing C++ file 'best_deepC/best.cpp' [INFO] deepSea model files are ready in 'best_deepC/' [RUNNING COMMAND] g++ -std=c++11 -O3 -fno-rtti -fno-exceptions -I. -I/opt/tljh/user/lib/python3.7/site-packages/deepC-0.13-py3.7-linux-x86_64.egg/deepC/include -isystem /opt/tljh/user/lib/python3.7/site-packages/deepC-0.13-py3.7-linux-x86_64.egg/deepC/packages/eigen-eigen-323c052e1731 "best_deepC/best.cpp" -D_AITS_MAIN -o "best_deepC/best.exe" [RUNNING COMMAND] size "best_deepC/best.exe" text data bss dec hex filename 690061 3784 760 694605 a994d best_deepC/best.exe [SUCCESS] Saved model as executable "best_deepC/best.exe"
In [ ]: