Cainvas: Lower Back Pain Classification
In [1]:
import pandas as pd
import numpy as np
import seaborn as sns

Import Data

In [2]:
!wget https://cainvas-static.s3.amazonaws.com/media/user_data/SiddharthGan/lowerback.zip

!unzip -qo lowerback.zip

# the zip file is no longer needed
!rm lowerback.zip
--2021-09-08 06:50:44--  https://cainvas-static.s3.amazonaws.com/media/user_data/SiddharthGan/lowerback.zip
Resolving cainvas-static.s3.amazonaws.com (cainvas-static.s3.amazonaws.com)... 52.219.156.19
Connecting to cainvas-static.s3.amazonaws.com (cainvas-static.s3.amazonaws.com)|52.219.156.19|:443... connected.
HTTP request sent, awaiting response... 200 OK
Length: 20261 (20K) [application/zip]
Saving to: ‘lowerback.zip’

lowerback.zip       100%[===================>]  19.79K  --.-KB/s    in 0.001s  

2021-09-08 06:50:44 (32.6 MB/s) - ‘lowerback.zip’ saved [20261/20261]

In [3]:
data = pd.read_csv('Dataset_spine.csv')
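
A quick shape check is a reasonable first step here (a sketch; the expected result is 310 rows and 14 columns, including the stray 'Unnamed: 13' column that is dropped below):

data.shape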

Data Exploration and Preprocessing

In [4]:
data.head()
Out[4]:
Col1 Col2 Col3 Col4 Col5 Col6 Col7 Col8 Col9 Col10 Col11 Col12 Class_att Unnamed: 13
0 63.027818 22.552586 39.609117 40.475232 98.672917 -0.254400 0.744503 12.5661 14.5386 15.30468 -28.658501 43.5123 Abnormal NaN
1 39.056951 10.060991 25.015378 28.995960 114.405425 4.564259 0.415186 12.8874 17.5323 16.78486 -25.530607 16.1102 Abnormal NaN
2 68.832021 22.218482 50.092194 46.613539 105.985135 -3.530317 0.474889 26.8343 17.4861 16.65897 -29.031888 19.2221 Abnormal Prediction is done by using binary classificat...
3 69.297008 24.652878 44.311238 44.644130 101.868495 11.211523 0.369345 23.5603 12.7074 11.42447 -30.470246 18.8329 Abnormal NaN
4 49.712859 9.652075 28.317406 40.060784 108.168725 7.918501 0.543360 35.4940 15.9546 8.87237 -16.378376 24.9171 Abnormal NaN
In [5]:
# drop the stray 'Unnamed: 13' column that came with the CSV
data.drop(['Unnamed: 13'], axis=1, inplace=True)

data.head()
Out[5]:
Col1 Col2 Col3 Col4 Col5 Col6 Col7 Col8 Col9 Col10 Col11 Col12 Class_att
0 63.027818 22.552586 39.609117 40.475232 98.672917 -0.254400 0.744503 12.5661 14.5386 15.30468 -28.658501 43.5123 Abnormal
1 39.056951 10.060991 25.015378 28.995960 114.405425 4.564259 0.415186 12.8874 17.5323 16.78486 -25.530607 16.1102 Abnormal
2 68.832021 22.218482 50.092194 46.613539 105.985135 -3.530317 0.474889 26.8343 17.4861 16.65897 -29.031888 19.2221 Abnormal
3 69.297008 24.652878 44.311238 44.644130 101.868495 11.211523 0.369345 23.5603 12.7074 11.42447 -30.470246 18.8329 Abnormal
4 49.712859 9.652075 28.317406 40.060784 108.168725 7.918501 0.543360 35.4940 15.9546 8.87237 -16.378376 24.9171 Abnormal
In [6]:
data['Class_att'] = data['Class_att'].map({'Abnormal': 1, 'Normal': 0})

data.head()
Out[6]:
Col1 Col2 Col3 Col4 Col5 Col6 Col7 Col8 Col9 Col10 Col11 Col12 Class_att
0 63.027818 22.552586 39.609117 40.475232 98.672917 -0.254400 0.744503 12.5661 14.5386 15.30468 -28.658501 43.5123 1
1 39.056951 10.060991 25.015378 28.995960 114.405425 4.564259 0.415186 12.8874 17.5323 16.78486 -25.530607 16.1102 1
2 68.832021 22.218482 50.092194 46.613539 105.985135 -3.530317 0.474889 26.8343 17.4861 16.65897 -29.031888 19.2221 1
3 69.297008 24.652878 44.311238 44.644130 101.868495 11.211523 0.369345 23.5603 12.7074 11.42447 -30.470246 18.8329 1
4 49.712859 9.652075 28.317406 40.060784 108.168725 7.918501 0.543360 35.4940 15.9546 8.87237 -16.378376 24.9171 1
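
Before renaming the columns, it is worth checking how balanced the target is, since accuracy figures read differently on skewed data. A minimal sketch; from the describe() summary further down (mean of class ≈ 0.677), the split is roughly 210 Abnormal to 100 Normal:

# distribution of the encoded target
data['Class_att'].value_counts()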
In [7]:
# give the anonymous Col* columns descriptive biomechanical names
data = data.rename(columns={'Col1': 'pelvic_incidence', 
                            'Col2': 'pelvic_tilt', 
                            'Col3': 'lumbar_lordosis_angle', 
                            'Col4': 'sacral_slope', 
                            'Col5': 'pelvic_radius', 
                            'Col6': 'degree_spondylolisthesis', 
                            'Col7': 'pelvic_slope', 
                            'Col8': 'direct_tilt', 
                            'Col9': 'thoracic_slope', 
                            'Col10': 'cervical_tilt', 
                            'Col11': 'sacrum_angle', 
                            'Col12': 'scoliosis_slope', 
                            'Class_att': 'class'})
In [8]:
data.head()
Out[8]:
pelvic_incidence pelvic_tilt lumbar_lordosis_angle sacral_slope pelvic_radius degree_spondylolisthesis pelvic_slope direct_tilt thoracic_slope cervical_tilt sacrum_angle scoliosis_slope class
0 63.027818 22.552586 39.609117 40.475232 98.672917 -0.254400 0.744503 12.5661 14.5386 15.30468 -28.658501 43.5123 1
1 39.056951 10.060991 25.015378 28.995960 114.405425 4.564259 0.415186 12.8874 17.5323 16.78486 -25.530607 16.1102 1
2 68.832021 22.218482 50.092194 46.613539 105.985135 -3.530317 0.474889 26.8343 17.4861 16.65897 -29.031888 19.2221 1
3 69.297008 24.652878 44.311238 44.644130 101.868495 11.211523 0.369345 23.5603 12.7074 11.42447 -30.470246 18.8329 1
4 49.712859 9.652075 28.317406 40.060784 108.168725 7.918501 0.543360 35.4940 15.9546 8.87237 -16.378376 24.9171 1
In [9]:
data.info()
<class 'pandas.core.frame.DataFrame'>
RangeIndex: 310 entries, 0 to 309
Data columns (total 13 columns):
 #   Column                    Non-Null Count  Dtype  
---  ------                    --------------  -----  
 0   pelvic_incidence          310 non-null    float64
 1   pelvic_tilt               310 non-null    float64
 2   lumbar_lordosis_angle     310 non-null    float64
 3   sacral_slope              310 non-null    float64
 4   pelvic_radius             310 non-null    float64
 5   degree_spondylolisthesis  310 non-null    float64
 6   pelvic_slope              310 non-null    float64
 7   direct_tilt               310 non-null    float64
 8   thoracic_slope            310 non-null    float64
 9   cervical_tilt             310 non-null    float64
 10  sacrum_angle              310 non-null    float64
 11  scoliosis_slope           310 non-null    float64
 12  class                     310 non-null    int64  
dtypes: float64(12), int64(1)
memory usage: 31.6 KB
In [10]:
data.describe()
Out[10]:
pelvic_incidence pelvic_tilt lumbar_lordosis_angle sacral_slope pelvic_radius degree_spondylolisthesis pelvic_slope direct_tilt thoracic_slope cervical_tilt sacrum_angle scoliosis_slope class
count 310.000000 310.000000 310.000000 310.000000 310.000000 310.000000 310.000000 310.000000 310.000000 310.000000 310.000000 310.000000 310.000000
mean 60.496653 17.542822 51.930930 42.953831 117.920655 26.296694 0.472979 21.321526 13.064511 11.933317 -14.053139 25.645981 0.677419
std 17.236520 10.008330 18.554064 13.423102 13.317377 37.559027 0.285787 8.639423 3.399713 2.893265 12.225582 10.450558 0.468220
min 26.147921 -6.554948 14.000000 13.366931 70.082575 -11.058179 0.003220 7.027000 7.037800 7.030600 -35.287375 7.007900 0.000000
25% 46.430294 10.667069 37.000000 33.347122 110.709196 1.603727 0.224367 13.054400 10.417800 9.541140 -24.289522 17.189075 0.000000
50% 58.691038 16.357689 49.562398 42.404912 118.268178 11.767934 0.475989 21.907150 12.938450 11.953835 -14.622856 24.931950 1.000000
75% 72.877696 22.120395 63.000000 52.695888 125.467674 41.287352 0.704846 28.954075 15.889525 14.371810 -3.497094 33.979600 1.000000
max 129.834041 49.431864 125.742385 121.429566 163.071041 418.543082 0.998827 36.743900 19.324000 16.821080 6.972071 44.341200 1.000000
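
One value in this summary stands out: degree_spondylolisthesis has a maximum of 418.5 against a 75th percentile of about 41.3, so at least one row is an extreme outlier. A quick way to inspect it (the threshold of 400 is an arbitrary choice for illustration):

# show the row(s) with the extreme degree_spondylolisthesis value
data[data['degree_spondylolisthesis'] > 400]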
In [11]:
import matplotlib.pyplot as plt
In [12]:
%matplotlib inline
sns.set_style('whitegrid')
In [13]:
plt.figure(figsize=(12,9))
sns.heatmap(data.corr(), annot=True)
Out[13]:
<AxesSubplot:>
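
The heatmap gives a visual overview; to read off which features track the label most strongly, the correlations with class can also be sorted directly. A minimal sketch:

# feature-label correlations, strongest positive first
data.corr()['class'].drop('class').sort_values(ascending=False)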
In [14]:
from sklearn.preprocessing import StandardScaler

# standardize features to zero mean and unit variance
scaler = StandardScaler()

y = data['class'].values
X = scaler.fit_transform(data[data.columns[:-1]])
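
StandardScaler rescales each feature to zero mean and unit variance, which keeps the 12 inputs on comparable scales for the network. A quick verification sketch (values should be ~0 and ~1 up to floating-point error):

print(X.mean(axis=0).round(6))  # expect ~0 for every column
print(X.std(axis=0).round(6))   # expect ~1 for every column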

Train Test Split

In [15]:
from sklearn.model_selection import train_test_split
In [16]:
X_train, X_test, y_train, y_test = train_test_split(X, y, random_state=101)
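
With the default test_size of 0.25, this leaves 232 samples for training and 78 for testing. Given the roughly 2:1 class imbalance, a stratified split is a reasonable variant to consider (a sketch, not what was run here):

# stratify=y preserves the Abnormal/Normal ratio in both splits
X_train, X_test, y_train, y_test = train_test_split(
    X, y, random_state=101, stratify=y)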
In [17]:
import tensorflow
from tensorflow.keras.layers import Dense, Dropout
from tensorflow.keras.models import Sequential

Model Architecture

In [18]:
model = Sequential()
model.add(Dense(64, activation='relu', input_shape=(12,)))  # 12 biomechanical input features
model.add(Dropout(0.5))                                     # regularization against overfitting
model.add(Dense(1, activation='sigmoid'))                   # probability of the Abnormal class
model.compile(optimizer='rmsprop', loss='binary_crossentropy', metrics=['accuracy'])
model.summary()
Model: "sequential"
_________________________________________________________________
Layer (type)                 Output Shape              Param #   
=================================================================
dense (Dense)                (None, 64)                832       
_________________________________________________________________
dropout (Dropout)            (None, 64)                0         
_________________________________________________________________
dense_1 (Dense)              (None, 1)                 65        
=================================================================
Total params: 897
Trainable params: 897
Non-trainable params: 0
_________________________________________________________________
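The parameter counts follow directly from the layer shapes: the first Dense layer has 12 inputs × 64 units + 64 biases = 832 parameters, Dropout adds none, and the output layer contributes 64 × 1 + 1 = 65, giving the 897 total shown above.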

Training the Model

In [19]:
history = model.fit(X_train, y_train, batch_size=32, epochs=300, verbose=2, validation_split=0.2)
Epoch 1/300
6/6 - 0s - loss: 0.7144 - accuracy: 0.5405 - val_loss: 0.6668 - val_accuracy: 0.5957
Epoch 2/300
6/6 - 0s - loss: 0.6857 - accuracy: 0.5784 - val_loss: 0.6358 - val_accuracy: 0.6170
Epoch 3/300
6/6 - 0s - loss: 0.6582 - accuracy: 0.6270 - val_loss: 0.6139 - val_accuracy: 0.6809
Epoch 4/300
6/6 - 0s - loss: 0.6528 - accuracy: 0.6216 - val_loss: 0.5979 - val_accuracy: 0.7021
Epoch 5/300
6/6 - 0s - loss: 0.6156 - accuracy: 0.6703 - val_loss: 0.5842 - val_accuracy: 0.7021
...
Epoch 199/300
6/6 - 0s - loss: 0.2100 - accuracy: 0.9297 - val_loss: 0.3196 - val_accuracy: 0.8298
Epoch 200/300
6/6 - 0s - loss: 0.1826 - accuracy: 0.9351 - val_loss: 0.3192 - val_accuracy: 0.8298
Epoch 201/300
6/6 - 0s - loss: 0.2255 - accuracy: 0.8865 - val_loss: 0.3203 - val_accuracy: 0.8298
...
Epoch 296/300
6/6 - 0s - loss: 0.1474 - accuracy: 0.9405 - val_loss: 0.3632 - val_accuracy: 0.8298
Epoch 297/300
6/6 - 0s - loss: 0.1761 - accuracy: 0.9243 - val_loss: 0.3631 - val_accuracy: 0.8298
Epoch 298/300
6/6 - 0s - loss: 0.1724 - accuracy: 0.9351 - val_loss: 0.3629 - val_accuracy: 0.8298
Epoch 299/300
6/6 - 0s - loss: 0.1486 - accuracy: 0.9405 - val_loss: 0.3637 - val_accuracy: 0.8298
Epoch 300/300
6/6 - 0s - loss: 0.1404 - accuracy: 0.9351 - val_loss: 0.3652 - val_accuracy: 0.8298
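
Validation loss bottoms out around epoch 200 (≈0.319) and drifts upward afterwards while training loss keeps falling, a sign of mild overfitting. One way to avoid hand-picking the epoch count is an EarlyStopping callback; a sketch, with the patience value an arbitrary assumption:

from tensorflow.keras.callbacks import EarlyStopping

# stop once val_loss has not improved for 30 epochs and
# roll back to the weights from the best epoch seen
early_stop = EarlyStopping(monitor='val_loss', patience=30,
                           restore_best_weights=True)
history = model.fit(X_train, y_train, batch_size=32, epochs=300, verbose=2,
                    validation_split=0.2, callbacks=[early_stop])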

Accuracy and Loss Graphs

In [20]:
plt.plot(history.history['accuracy'], label='accuracy')
plt.plot(history.history['val_accuracy'], label = 'val_accuracy')
plt.xlabel('Epoch')
plt.ylabel('Accuracy')
plt.legend(loc='lower right')
Out[20]:
<matplotlib.legend.Legend at 0x7fb55404eac8>
In [21]:
plt.plot(history.history['loss'], label='loss')
plt.plot(history.history['val_loss'], label = 'val_loss')
plt.xlabel('Epoch')
plt.ylabel('Loss')
plt.legend(loc='lower right')
Out[21]:
<matplotlib.legend.Legend at 0x7fb548760b00>
In [22]:
score = model.evaluate(X_test, y_test, verbose=0)
print('Test loss:', score[0])
print('Test accuracy:', score[1])
Test loss: 0.2859385013580322
Test accuracy: 0.8717948794364929
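
Accuracy alone can hide how each class fares individually, which matters with a 2:1 imbalance. A per-class breakdown is a useful follow-up; a minimal sketch using scikit-learn, thresholding the sigmoid outputs at the conventional 0.5:

from sklearn.metrics import classification_report

y_pred = (model.predict(X_test) > 0.5).astype(int).ravel()  # hard 0/1 labels
print(classification_report(y_test, y_pred, target_names=['Normal', 'Abnormal']))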
In [23]:
model.save('model.h5')
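
The saved HDF5 file stores the architecture, weights, and optimizer state, so the model can be restored later without redefining it. A quick round-trip sketch:

from tensorflow.keras.models import load_model

restored = load_model('model.h5')
restored.evaluate(X_test, y_test, verbose=0)  # should match the scores above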

deepCC

In [24]:
!deepCC model.h5
[INFO]
Reading [keras model] 'model.h5'
[SUCCESS]
Saved 'model_deepC/model.onnx'
[INFO]
Reading [onnx model] 'model_deepC/model.onnx'
[INFO]
Model info:
  ir_vesion : 4
  doc       : 
[WARNING]
[ONNX]: terminal (input/output) dense_input's shape is less than 1. Changing it to 1.
[WARNING]
[ONNX]: terminal (input/output) dense_1's shape is less than 1. Changing it to 1.
WARN (GRAPH): found operator node with the same name (dense_1) as io node.
[INFO]
Running DNNC graph sanity check ...
[SUCCESS]
Passed sanity check.
[INFO]
Writing C++ file 'model_deepC/model.cpp'
[INFO]
deepSea model files are ready in 'model_deepC/' 
[RUNNING COMMAND]
g++ -std=c++11 -O3 -fno-rtti -fno-exceptions -I. -I/opt/tljh/user/lib/python3.7/site-packages/deepC-0.13-py3.7-linux-x86_64.egg/deepC/include -isystem /opt/tljh/user/lib/python3.7/site-packages/deepC-0.13-py3.7-linux-x86_64.egg/deepC/packages/eigen-eigen-323c052e1731 "model_deepC/model.cpp" -D_AITS_MAIN -o "model_deepC/model.exe"
[RUNNING COMMAND]
size "model_deepC/model.exe"
   text	   data	    bss	    dec	    hex	filename
 121291	   2968	    760	 125019	  1e85b	model_deepC/model.exe
[SUCCESS]
Saved model as executable "model_deepC/model.exe"