Lower Back Pain Detection App

Credit: AITS Cainvas Community

Photo by Colleen Tracey on Dribbble

In [1]:
import pandas as pd
import numpy as np
import seaborn as sns

Import Data

In [2]:
!wget https://cainvas-static.s3.amazonaws.com/media/user_data/cainvas-admin/lowerback.zip

!unzip -qo lowerback.zip

# zip folder is not needed anymore
!rm lowerback.zip
--2021-12-08 08:01:20--  https://cainvas-static.s3.amazonaws.com/media/user_data/cainvas-admin/lowerback.zip
Resolving cainvas-static.s3.amazonaws.com (cainvas-static.s3.amazonaws.com)... 52.219.64.60
Connecting to cainvas-static.s3.amazonaws.com (cainvas-static.s3.amazonaws.com)|52.219.64.60|:443... connected.
HTTP request sent, awaiting response... 200 OK
Length: 20261 (20K) [application/x-zip-compressed]
Saving to: ‘lowerback.zip’

lowerback.zip       100%[===================>]  19.79K  --.-KB/s    in 0s      

2021-12-08 08:01:20 (81.0 MB/s) - ‘lowerback.zip’ saved [20261/20261]

In [3]:
data = pd.read_csv('Dataset_spine.csv')

Data Visualization

In [4]:
data.head()
Out[4]:
Col1 Col2 Col3 Col4 Col5 Col6 Col7 Col8 Col9 Col10 Col11 Col12 Class_att Unnamed: 13
0 63.027818 22.552586 39.609117 40.475232 98.672917 -0.254400 0.744503 12.5661 14.5386 15.30468 -28.658501 43.5123 Abnormal NaN
1 39.056951 10.060991 25.015378 28.995960 114.405425 4.564259 0.415186 12.8874 17.5323 16.78486 -25.530607 16.1102 Abnormal NaN
2 68.832021 22.218482 50.092194 46.613539 105.985135 -3.530317 0.474889 26.8343 17.4861 16.65897 -29.031888 19.2221 Abnormal Prediction is done by using binary classificat...
3 69.297008 24.652878 44.311238 44.644130 101.868495 11.211523 0.369345 23.5603 12.7074 11.42447 -30.470246 18.8329 Abnormal NaN
4 49.712859 9.652075 28.317406 40.060784 108.168725 7.918501 0.543360 35.4940 15.9546 8.87237 -16.378376 24.9171 Abnormal NaN
In [5]:
# 'Unnamed: 13' holds only a stray note, so drop it
data.drop(['Unnamed: 13'], axis=1, inplace=True)

data.head()
Out[5]:
Col1 Col2 Col3 Col4 Col5 Col6 Col7 Col8 Col9 Col10 Col11 Col12 Class_att
0 63.027818 22.552586 39.609117 40.475232 98.672917 -0.254400 0.744503 12.5661 14.5386 15.30468 -28.658501 43.5123 Abnormal
1 39.056951 10.060991 25.015378 28.995960 114.405425 4.564259 0.415186 12.8874 17.5323 16.78486 -25.530607 16.1102 Abnormal
2 68.832021 22.218482 50.092194 46.613539 105.985135 -3.530317 0.474889 26.8343 17.4861 16.65897 -29.031888 19.2221 Abnormal
3 69.297008 24.652878 44.311238 44.644130 101.868495 11.211523 0.369345 23.5603 12.7074 11.42447 -30.470246 18.8329 Abnormal
4 49.712859 9.652075 28.317406 40.060784 108.168725 7.918501 0.543360 35.4940 15.9546 8.87237 -16.378376 24.9171 Abnormal
In [6]:
# Encode the target: Abnormal -> 1, Normal -> 0
data['Class_att'] = data['Class_att'].map({'Abnormal': 1, 'Normal': 0})

data.head()
Out[6]:
Col1 Col2 Col3 Col4 Col5 Col6 Col7 Col8 Col9 Col10 Col11 Col12 Class_att
0 63.027818 22.552586 39.609117 40.475232 98.672917 -0.254400 0.744503 12.5661 14.5386 15.30468 -28.658501 43.5123 1
1 39.056951 10.060991 25.015378 28.995960 114.405425 4.564259 0.415186 12.8874 17.5323 16.78486 -25.530607 16.1102 1
2 68.832021 22.218482 50.092194 46.613539 105.985135 -3.530317 0.474889 26.8343 17.4861 16.65897 -29.031888 19.2221 1
3 69.297008 24.652878 44.311238 44.644130 101.868495 11.211523 0.369345 23.5603 12.7074 11.42447 -30.470246 18.8329 1
4 49.712859 9.652075 28.317406 40.060784 108.168725 7.918501 0.543360 35.4940 15.9546 8.87237 -16.378376 24.9171 1
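
The encoded labels make it easy to check the class balance (the class mean of 0.677 in describe() further below implies roughly 210 Abnormal to 100 Normal samples). A quick sanity check:

# Count samples per class (1 = Abnormal, 0 = Normal)
print(data['Class_att'].value_counts())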
In [7]:
data = data.rename(columns={'Col1': 'pelvic_incidence', 
                            'Col2': 'pelvic_tilt', 
                            'Col3': 'lumbar_lordosis_angle', 
                            'Col4': 'sacral_slope', 
                            'Col5': 'pelvic_radius', 
                            'Col6': 'degree_spondylolisthesis', 
                            'Col7': 'pelvic_slope', 
                            'Col8': 'direct_tilt', 
                            'Col9': 'thoracic_slope', 
                            'Col10': 'cervical_tilt', 
                            'Col11': 'sacrum_angle', 
                            'Col12': 'scoliosis_slope', 
                            'Class_att': 'class'})
In [8]:
data.head()
Out[8]:
pelvic_incidence pelvic_tilt lumbar_lordosis_angle sacral_slope pelvic_radius degree_spondylolisthesis pelvic_slope direct_tilt thoracic_slope cervical_tilt sacrum_angle scoliosis_slope class
0 63.027818 22.552586 39.609117 40.475232 98.672917 -0.254400 0.744503 12.5661 14.5386 15.30468 -28.658501 43.5123 1
1 39.056951 10.060991 25.015378 28.995960 114.405425 4.564259 0.415186 12.8874 17.5323 16.78486 -25.530607 16.1102 1
2 68.832021 22.218482 50.092194 46.613539 105.985135 -3.530317 0.474889 26.8343 17.4861 16.65897 -29.031888 19.2221 1
3 69.297008 24.652878 44.311238 44.644130 101.868495 11.211523 0.369345 23.5603 12.7074 11.42447 -30.470246 18.8329 1
4 49.712859 9.652075 28.317406 40.060784 108.168725 7.918501 0.543360 35.4940 15.9546 8.87237 -16.378376 24.9171 1
In [9]:
data.info()
<class 'pandas.core.frame.DataFrame'>
RangeIndex: 310 entries, 0 to 309
Data columns (total 13 columns):
 #   Column                    Non-Null Count  Dtype  
---  ------                    --------------  -----  
 0   pelvic_incidence          310 non-null    float64
 1   pelvic_tilt               310 non-null    float64
 2   lumbar_lordosis_angle     310 non-null    float64
 3   sacral_slope              310 non-null    float64
 4   pelvic_radius             310 non-null    float64
 5   degree_spondylolisthesis  310 non-null    float64
 6   pelvic_slope              310 non-null    float64
 7   direct_tilt               310 non-null    float64
 8   thoracic_slope            310 non-null    float64
 9   cervical_tilt             310 non-null    float64
 10  sacrum_angle              310 non-null    float64
 11  scoliosis_slope           310 non-null    float64
 12  class                     310 non-null    int64  
dtypes: float64(12), int64(1)
memory usage: 31.6 KB
In [10]:
data.describe()
Out[10]:
pelvic_incidence pelvic_tilt lumbar_lordosis_angle sacral_slope pelvic_radius degree_spondylolisthesis pelvic_slope direct_tilt thoracic_slope cervical_tilt sacrum_angle scoliosis_slope class
count 310.000000 310.000000 310.000000 310.000000 310.000000 310.000000 310.000000 310.000000 310.000000 310.000000 310.000000 310.000000 310.000000
mean 60.496653 17.542822 51.930930 42.953831 117.920655 26.296694 0.472979 21.321526 13.064511 11.933317 -14.053139 25.645981 0.677419
std 17.236520 10.008330 18.554064 13.423102 13.317377 37.559027 0.285787 8.639423 3.399713 2.893265 12.225582 10.450558 0.468220
min 26.147921 -6.554948 14.000000 13.366931 70.082575 -11.058179 0.003220 7.027000 7.037800 7.030600 -35.287375 7.007900 0.000000
25% 46.430294 10.667069 37.000000 33.347122 110.709196 1.603727 0.224367 13.054400 10.417800 9.541140 -24.289522 17.189075 0.000000
50% 58.691038 16.357689 49.562398 42.404912 118.268178 11.767934 0.475989 21.907150 12.938450 11.953835 -14.622856 24.931950 1.000000
75% 72.877696 22.120395 63.000000 52.695888 125.467674 41.287352 0.704846 28.954075 15.889525 14.371810 -3.497094 33.979600 1.000000
max 129.834041 49.431864 125.742385 121.429566 163.071041 418.543082 0.998827 36.743900 19.324000 16.821080 6.972071 44.341200 1.000000
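
The summary statistics hint at a strong outlier: degree_spondylolisthesis reaches 418.5 while its 75th percentile is only about 41. A quick way to inspect such rows (the threshold here is an arbitrary choice):

# Show rows with extreme degree_spondylolisthesis values
print(data[data['degree_spondylolisthesis'] > 200])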
In [11]:
import matplotlib.pyplot as plt  # seaborn was already imported in In [1]
In [12]:
%matplotlib inline
sns.set_style('whitegrid')
In [13]:
plt.figure(figsize=(12,9))
sns.heatmap(data.corr(), annot=True)
Out[13]:
<AxesSubplot:>
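
The heatmap makes pairwise relationships easy to scan. To rank features by their correlation with the target programmatically, something like this works:

# Rank features by absolute correlation with the class label
corr_with_class = data.corr()['class'].drop('class')
print(corr_with_class.abs().sort_values(ascending=False))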
In [14]:
from sklearn.preprocessing import StandardScaler


# Standardize all 12 features; the last column is the target
scaler = StandardScaler()

y = data['class'].values
X = scaler.fit_transform(data[data.columns[:-1]])
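
Note that fitting the scaler on the full dataset lets statistics from the soon-to-be test split leak into training. A stricter variant fits the scaler on the training portion only; a sketch (using the same random_state as the split in the next section):

# Leakage-free alternative: fit the scaler on training data only
from sklearn.model_selection import train_test_split

X_raw = data[data.columns[:-1]].values
X_tr, X_te, y_tr, y_te = train_test_split(X_raw, y, random_state=101)

scaler = StandardScaler().fit(X_tr)               # statistics from X_tr only
X_tr, X_te = scaler.transform(X_tr), scaler.transform(X_te)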

Train Test Split

In [15]:
from sklearn.model_selection import train_test_split
In [16]:
X_train, X_test, y_train, y_test = train_test_split(X, y, random_state=101)
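
With roughly two thirds of the samples labelled Abnormal, a stratified split keeps the class ratio consistent across the two sets; an optional variant of the call above:

# Stratified variant: preserve the Abnormal/Normal ratio in both splits
X_train, X_test, y_train, y_test = train_test_split(X, y, random_state=101, stratify=y)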
In [17]:
import tensorflow
from tensorflow.keras.layers import Dense, Dropout
from tensorflow.keras.models import Sequential

Model Architecture

In [18]:
model = Sequential()
model.add(Dense(64, activation='relu', input_shape=(12,)))  # 12 input features
model.add(Dropout(0.5))                                     # regularize the small dataset
model.add(Dense(1, activation='sigmoid'))                   # outputs P(Abnormal)
model.compile(optimizer='rmsprop', loss='binary_crossentropy', metrics=['accuracy'])
model.summary()
Model: "sequential"
_________________________________________________________________
Layer (type)                 Output Shape              Param #   
=================================================================
dense (Dense)                (None, 64)                832       
_________________________________________________________________
dropout (Dropout)            (None, 64)                0         
_________________________________________________________________
dense_1 (Dense)              (None, 1)                 65        
=================================================================
Total params: 897
Trainable params: 897
Non-trainable params: 0
_________________________________________________________________

Training the Model

In [19]:
# Train for 300 epochs, holding out 20% of the training set for validation
history = model.fit(X_train, y_train, batch_size=32, epochs=300, verbose=2, validation_split=0.2)
Epoch 1/300
6/6 - 1s - loss: 0.7911 - accuracy: 0.4486 - val_loss: 0.7280 - val_accuracy: 0.4468
Epoch 2/300
6/6 - 0s - loss: 0.7713 - accuracy: 0.4703 - val_loss: 0.6933 - val_accuracy: 0.5319
Epoch 3/300
6/6 - 0s - loss: 0.6698 - accuracy: 0.5838 - val_loss: 0.6683 - val_accuracy: 0.5745
Epoch 4/300
6/6 - 0s - loss: 0.6637 - accuracy: 0.5676 - val_loss: 0.6472 - val_accuracy: 0.5745
Epoch 5/300
6/6 - 0s - loss: 0.6268 - accuracy: 0.5946 - val_loss: 0.6300 - val_accuracy: 0.5745
...
Epoch 127/300
6/6 - 0s - loss: 0.2366 - accuracy: 0.8973 - val_loss: 0.3360 - val_accuracy: 0.8511
...
Epoch 298/300
6/6 - 0s - loss: 0.1549 - accuracy: 0.9514 - val_loss: 0.3872 - val_accuracy: 0.8298
Epoch 299/300
6/6 - 0s - loss: 0.1394 - accuracy: 0.9351 - val_loss: 0.3885 - val_accuracy: 0.8085
Epoch 300/300
6/6 - 0s - loss: 0.1622 - accuracy: 0.9297 - val_loss: 0.3880 - val_accuracy: 0.8298
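
The log shows validation loss bottoming out around epoch 127 (val_loss ≈ 0.336) and drifting upward afterwards while training loss keeps falling, a classic overfitting signature. An EarlyStopping callback would halt training near that point; a sketch (the patience value is an arbitrary choice):

from tensorflow.keras.callbacks import EarlyStopping

# Stop once val_loss has not improved for 30 epochs and keep the best weights
early_stop = EarlyStopping(monitor='val_loss', patience=30, restore_best_weights=True)
history = model.fit(X_train, y_train, batch_size=32, epochs=300, verbose=2,
                    validation_split=0.2, callbacks=[early_stop])

With restore_best_weights=True the model retains the parameters from the best validation epoch rather than the last one.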

Accuracy and Loss Graphs

In [20]:
plt.plot(history.history['accuracy'], label='accuracy')
plt.plot(history.history['val_accuracy'], label='val_accuracy')
plt.xlabel('Epoch')
plt.ylabel('Accuracy')
plt.legend(loc='lower right')
Out[20]:
<matplotlib.legend.Legend at 0x7fc188194048>
In [21]:
plt.plot(history.history['loss'], label='loss')
plt.plot(history.history['val_loss'], label='val_loss')
plt.xlabel('Epoch')
plt.ylabel('Loss')
plt.legend(loc='lower right')
Out[21]:
<matplotlib.legend.Legend at 0x7fc18814b5f8>
In [22]:
score = model.evaluate(X_test, y_test, verbose=0)
print('Test loss:', score[0])
print('Test accuracy:', score[1])
Test loss: 0.30144044756889343
Test accuracy: 0.8846153616905212
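
Accuracy alone can flatter a model on an imbalanced target; a per-class breakdown gives a fuller picture. A sketch using scikit-learn:

from sklearn.metrics import classification_report, confusion_matrix

# Threshold the sigmoid outputs at 0.5 to get hard labels
y_pred = (model.predict(X_test) > 0.5).astype(int).ravel()
print(confusion_matrix(y_test, y_pred))
print(classification_report(y_test, y_pred, target_names=['Normal', 'Abnormal']))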
In [23]:
model.save('model.h5')
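
For deployment, the saved model can be reloaded and queried on a single sample; a minimal sketch (it assumes new inputs are scaled with the same StandardScaler fitted above):

from tensorflow.keras.models import load_model

reloaded = load_model('model.h5')

# Predict on one already-scaled row; the output is P(Abnormal)
prob = float(reloaded.predict(X_test[:1])[0][0])
print('Abnormal' if prob > 0.5 else 'Normal', f'(p={prob:.3f})')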

deepCC

In [24]:
!deepCC model.h5
[INFO]
Reading [keras model] 'model.h5'
[SUCCESS]
Saved 'model_deepC/model.onnx'
[INFO]
Reading [onnx model] 'model_deepC/model.onnx'
[INFO]
Model info:
  ir_vesion : 4
  doc       : 
[WARNING]
[ONNX]: terminal (input/output) dense_input's shape is less than 1. Changing it to 1.
[WARNING]
[ONNX]: terminal (input/output) dense_1's shape is less than 1. Changing it to 1.
[INFO]
Running DNNC graph sanity check ...
[SUCCESS]
Passed sanity check.
[INFO]
Writing C++ file 'model_deepC/model.cpp'
[INFO]
deepSea model files are ready in 'model_deepC/' 
[RUNNING COMMAND]
g++ -std=c++11 -O3 -fno-rtti -fno-exceptions -I. -I/opt/tljh/user/lib/python3.7/site-packages/deepC-0.13-py3.7-linux-x86_64.egg/deepC/include -isystem /opt/tljh/user/lib/python3.7/site-packages/deepC-0.13-py3.7-linux-x86_64.egg/deepC/packages/eigen-eigen-323c052e1731 "model_deepC/model.cpp" -D_AITS_MAIN -o "model_deepC/model.exe"
[RUNNING COMMAND]
size "model_deepC/model.exe"
   text	   data	    bss	    dec	    hex	filename
 121547	   2968	    760	 125275	  1e95b	model_deepC/model.exe
[SUCCESS]
Saved model as executable "model_deepC/model.exe"