Imports

In [54]:
%%capture
!pip install keras
!pip install tensorflow
In [55]:
import keras
import tensorflow
from keras import layers
from sklearn.datasets import load_breast_cancer 
import pandas as pd
from sklearn.preprocessing import MinMaxScaler
import matplotlib.pyplot as plt
import seaborn as sns
from sklearn.model_selection import train_test_split
from sklearn.utils import shuffle
from sklearn.metrics import confusion_matrix
In [56]:
import warnings
warnings.filterwarnings("ignore")

Importing the Wisconsin Breast Cancer dataset from sklearn

In [57]:
data = load_breast_cancer()
print(data.keys())
dict_keys(['data', 'target', 'frame', 'target_names', 'DESCR', 'feature_names', 'filename', 'data_module'])
In [58]:
X = data.data
y = data.target

Creating two DataFrames (features and corresponding targets) for EDA

In [59]:
features = pd.DataFrame(X, columns = data.feature_names)
targets = pd.DataFrame(y, columns = ['target'])
In [60]:
features.columns
Out[60]:
Index(['mean radius', 'mean texture', 'mean perimeter', 'mean area',
       'mean smoothness', 'mean compactness', 'mean concavity',
       'mean concave points', 'mean symmetry', 'mean fractal dimension',
       'radius error', 'texture error', 'perimeter error', 'area error',
       'smoothness error', 'compactness error', 'concavity error',
       'concave points error', 'symmetry error', 'fractal dimension error',
       'worst radius', 'worst texture', 'worst perimeter', 'worst area',
       'worst smoothness', 'worst compactness', 'worst concavity',
       'worst concave points', 'worst symmetry', 'worst fractal dimension'],
      dtype='object')
In [61]:
#features.head().T
#targets.isnull().sum()
#targets.value_counts()
#features.info()
In [62]:
count_m = targets[targets['target'] == 0].shape[0]  # in sklearn's dataset, target 0 = malignant
count_b = targets[targets['target'] == 1].shape[0]  # target 1 = benign

percentage_m = (count_m/targets.shape[0]) * 100
percentage_b = (count_b/targets.shape[0]) * 100
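To make the class balance explicit (and to double-check the label convention), here is a minimal sketch that prints the counts using the dataset's own label names; it assumes the variables from the cell above.

# Sketch: class distribution with sklearn's label names (0 = malignant, 1 = benign)
print(dict(zip(data.target_names, [count_m, count_b])))
print(f"malignant: {percentage_m:.1f}%  |  benign: {percentage_b:.1f}%")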
In [63]:
#for column in features.columns:
    #plt.figure()
    #sns.histplot(features[column], kde=True, color='skyblue')
    #plt.title(f'Distribution of {column}', fontsize=16)
    #plt.xlabel('Value', fontsize=14)
    #plt.ylabel('Frequency', fontsize=14)
    #plt.grid(True)
    #plt.show()

EDA

In [64]:
features.shape
Out[64]:
(569, 30)
In [65]:
g = sns.FacetGrid(features.melt(), col='variable', col_wrap=4, sharex=False, sharey=False, height=3)
g.map(sns.kdeplot, 'value', fill=True)
g.set_titles('Density Plot: {col_name}')
plt.tight_layout()
plt.show()
In [66]:
correlation_matrix = features.corr()
sns.heatmap(correlation_matrix, annot=False, cmap='coolwarm')
plt.title('Features Correlation Heatmap')
plt.show()

Train/test split + data scaling + shuffling

  • Using MinMaxScaler to scale the data, as the features have very different ranges and distributions
  • Test size = 0.25, as the dataset is relatively small
In [67]:
X_train, X_test, y_train, y_test = train_test_split(features, targets, test_size=0.25, random_state=36)
scaler = MinMaxScaler()

X_train = scaler.fit_transform(X_train)
X_test = scaler.transform(X_test)

X_train_shuffled, y_train_shuffled = shuffle(X_train, y_train, random_state=36)

Building the model with Keras

  • Input shape = 30 features, output = a single probability (value between 0 and 1)
  • 7 layers, 8 neurons per hidden layer, He normal weight initialization, L2 + dropout regularization, and a final sigmoid output activation
In [69]:
model_1 = keras.models.Sequential([ # more layers and regularization
    keras.layers.Flatten(input_shape=[30]),
    keras.layers.Dense(8, activation='relu', kernel_initializer = 'he_normal'),
    keras.layers.Dense(8, activation='relu', kernel_regularizer = keras.regularizers.l2(0.01)),
    keras.layers.Dropout(rate=0.1),
    keras.layers.Dense(8, activation='relu'), #kernel_regularizer = keras.regularizers.l2(0.01)),
    keras.layers.Dropout(rate=0.1),
    keras.layers.Dense(1, activation='sigmoid')
])
In [70]:
print(model_1.summary())
# Dropout is active only during training, so the training loss tends to be higher than the validation loss: validation runs the full network, which makes its predictions more reliable than the dropout-handicapped training passes.
Model: "sequential_2"
_________________________________________________________________
 Layer (type)                Output Shape              Param #   
=================================================================
 flatten_2 (Flatten)         (None, 30)                0         
                                                                 
 dense_8 (Dense)             (None, 8)                 248       
                                                                 
 dense_9 (Dense)             (None, 8)                 72        
                                                                 
 dropout_4 (Dropout)         (None, 8)                 0         
                                                                 
 dense_10 (Dense)            (None, 8)                 72        
                                                                 
 dropout_5 (Dropout)         (None, 8)                 0         
                                                                 
 dense_11 (Dense)            (None, 1)                 9         
                                                                 
=================================================================
Total params: 401 (1.57 KB)
Trainable params: 401 (1.57 KB)
Non-trainable params: 0 (0.00 Byte)
_________________________________________________________________
None
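The parameter counts in the summary follow the usual Dense-layer formula, inputs × units + units (weights plus biases); a quick check of the numbers above:

# Dense layer parameters = n_inputs * n_units + n_units (biases); Flatten/Dropout add none
print(30 * 8 + 8)         # dense_8:  248
print(8 * 8 + 8)          # dense_9:   72
print(8 * 8 + 8)          # dense_10:  72
print(8 * 1 + 1)          # dense_11:   9
print(248 + 72 + 72 + 9)  # total:    401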

Training the model

  • Loss = Binary Cross Entropy
  • Optimizer = Adam at 0.001 learning rate
  • Metric = accuracy
  • Batch size = 64
  • Epochs = 300
In [71]:
model_1.compile(loss='binary_crossentropy', optimizer=keras.optimizers.Adam(learning_rate=0.001), metrics=['accuracy'])
history = model_1.fit(X_train_shuffled, y_train_shuffled, batch_size = 64, epochs = 300, validation_data = (X_test, y_test))
Epoch 1/300
7/7 [==============================] - 1s 27ms/step - loss: 0.7831 - accuracy: 0.5493 - val_loss: 0.7472 - val_accuracy: 0.6084
Epoch 2/300
7/7 [==============================] - 0s 6ms/step - loss: 0.7538 - accuracy: 0.6150 - val_loss: 0.7297 - val_accuracy: 0.6573
Epoch 3/300
7/7 [==============================] - 0s 7ms/step - loss: 0.7436 - accuracy: 0.6362 - val_loss: 0.7167 - val_accuracy: 0.6783
Epoch 4/300
7/7 [==============================] - 0s 5ms/step - loss: 0.7284 - accuracy: 0.6714 - val_loss: 0.7039 - val_accuracy: 0.6923
Epoch 5/300
7/7 [==============================] - 0s 5ms/step - loss: 0.7156 - accuracy: 0.6925 - val_loss: 0.6909 - val_accuracy: 0.7483
Epoch 6/300
7/7 [==============================] - 0s 5ms/step - loss: 0.7008 - accuracy: 0.7230 - val_loss: 0.6788 - val_accuracy: 0.7692
Epoch 7/300
7/7 [==============================] - 0s 5ms/step - loss: 0.6944 - accuracy: 0.7371 - val_loss: 0.6665 - val_accuracy: 0.7902
Epoch 8/300
7/7 [==============================] - 0s 5ms/step - loss: 0.6834 - accuracy: 0.7746 - val_loss: 0.6537 - val_accuracy: 0.8112
Epoch 9/300
7/7 [==============================] - 0s 8ms/step - loss: 0.6603 - accuracy: 0.7911 - val_loss: 0.6404 - val_accuracy: 0.8182
Epoch 10/300
7/7 [==============================] - 0s 8ms/step - loss: 0.6483 - accuracy: 0.7723 - val_loss: 0.6267 - val_accuracy: 0.8182
Epoch 11/300
7/7 [==============================] - 0s 8ms/step - loss: 0.6429 - accuracy: 0.7840 - val_loss: 0.6125 - val_accuracy: 0.8252
Epoch 12/300
7/7 [==============================] - 0s 8ms/step - loss: 0.6259 - accuracy: 0.7911 - val_loss: 0.5978 - val_accuracy: 0.8392
Epoch 13/300
7/7 [==============================] - 0s 5ms/step - loss: 0.6103 - accuracy: 0.8099 - val_loss: 0.5823 - val_accuracy: 0.8392
Epoch 14/300
7/7 [==============================] - 0s 8ms/step - loss: 0.5911 - accuracy: 0.8263 - val_loss: 0.5665 - val_accuracy: 0.8392
Epoch 15/300
7/7 [==============================] - 0s 5ms/step - loss: 0.5938 - accuracy: 0.7958 - val_loss: 0.5497 - val_accuracy: 0.8671
Epoch 16/300
7/7 [==============================] - 0s 5ms/step - loss: 0.5699 - accuracy: 0.8239 - val_loss: 0.5333 - val_accuracy: 0.8601
Epoch 17/300
7/7 [==============================] - 0s 5ms/step - loss: 0.5559 - accuracy: 0.8310 - val_loss: 0.5168 - val_accuracy: 0.8671
Epoch 18/300
7/7 [==============================] - 0s 5ms/step - loss: 0.5446 - accuracy: 0.8333 - val_loss: 0.4994 - val_accuracy: 0.9021
Epoch 19/300
7/7 [==============================] - 0s 7ms/step - loss: 0.5204 - accuracy: 0.8380 - val_loss: 0.4823 - val_accuracy: 0.9091
Epoch 20/300
7/7 [==============================] - 0s 8ms/step - loss: 0.5075 - accuracy: 0.8427 - val_loss: 0.4654 - val_accuracy: 0.9231
Epoch 21/300
7/7 [==============================] - 0s 11ms/step - loss: 0.5134 - accuracy: 0.8286 - val_loss: 0.4486 - val_accuracy: 0.9510
Epoch 22/300
7/7 [==============================] - 0s 8ms/step - loss: 0.4739 - accuracy: 0.8615 - val_loss: 0.4313 - val_accuracy: 0.9510
Epoch 23/300
7/7 [==============================] - 0s 5ms/step - loss: 0.4628 - accuracy: 0.8685 - val_loss: 0.4150 - val_accuracy: 0.9510
Epoch 24/300
7/7 [==============================] - 0s 8ms/step - loss: 0.4523 - accuracy: 0.8545 - val_loss: 0.3992 - val_accuracy: 0.9510
Epoch 25/300
7/7 [==============================] - 0s 7ms/step - loss: 0.4370 - accuracy: 0.8803 - val_loss: 0.3837 - val_accuracy: 0.9510
Epoch 26/300
7/7 [==============================] - 0s 5ms/step - loss: 0.4254 - accuracy: 0.8756 - val_loss: 0.3687 - val_accuracy: 0.9510
Epoch 27/300
7/7 [==============================] - 0s 8ms/step - loss: 0.4141 - accuracy: 0.8826 - val_loss: 0.3543 - val_accuracy: 0.9510
Epoch 28/300
7/7 [==============================] - 0s 5ms/step - loss: 0.4085 - accuracy: 0.8662 - val_loss: 0.3416 - val_accuracy: 0.9510
Epoch 29/300
7/7 [==============================] - 0s 10ms/step - loss: 0.3941 - accuracy: 0.8709 - val_loss: 0.3285 - val_accuracy: 0.9510
Epoch 30/300
7/7 [==============================] - 0s 7ms/step - loss: 0.3657 - accuracy: 0.8944 - val_loss: 0.3172 - val_accuracy: 0.9510
Epoch 31/300
7/7 [==============================] - 0s 8ms/step - loss: 0.3679 - accuracy: 0.8756 - val_loss: 0.3063 - val_accuracy: 0.9510
Epoch 32/300
7/7 [==============================] - 0s 7ms/step - loss: 0.3647 - accuracy: 0.8803 - val_loss: 0.2964 - val_accuracy: 0.9510
Epoch 33/300
7/7 [==============================] - 0s 8ms/step - loss: 0.3444 - accuracy: 0.8991 - val_loss: 0.2873 - val_accuracy: 0.9510
Epoch 34/300
7/7 [==============================] - 0s 7ms/step - loss: 0.3479 - accuracy: 0.8897 - val_loss: 0.2784 - val_accuracy: 0.9510
Epoch 35/300
7/7 [==============================] - 0s 7ms/step - loss: 0.3364 - accuracy: 0.8873 - val_loss: 0.2705 - val_accuracy: 0.9510
Epoch 36/300
7/7 [==============================] - 0s 6ms/step - loss: 0.3532 - accuracy: 0.8779 - val_loss: 0.2649 - val_accuracy: 0.9580
Epoch 37/300
7/7 [==============================] - 0s 6ms/step - loss: 0.3224 - accuracy: 0.8920 - val_loss: 0.2564 - val_accuracy: 0.9441
Epoch 38/300
7/7 [==============================] - 0s 8ms/step - loss: 0.3257 - accuracy: 0.8897 - val_loss: 0.2515 - val_accuracy: 0.9510
Epoch 39/300
7/7 [==============================] - 0s 7ms/step - loss: 0.3111 - accuracy: 0.8873 - val_loss: 0.2442 - val_accuracy: 0.9510
Epoch 40/300
7/7 [==============================] - 0s 10ms/step - loss: 0.3218 - accuracy: 0.8897 - val_loss: 0.2414 - val_accuracy: 0.9720
Epoch 41/300
7/7 [==============================] - 0s 8ms/step - loss: 0.2949 - accuracy: 0.9061 - val_loss: 0.2326 - val_accuracy: 0.9441
Epoch 42/300
7/7 [==============================] - 0s 8ms/step - loss: 0.3021 - accuracy: 0.8944 - val_loss: 0.2276 - val_accuracy: 0.9510
Epoch 43/300
7/7 [==============================] - 0s 4ms/step - loss: 0.2910 - accuracy: 0.9014 - val_loss: 0.2227 - val_accuracy: 0.9510
Epoch 44/300
7/7 [==============================] - 0s 6ms/step - loss: 0.2732 - accuracy: 0.9155 - val_loss: 0.2172 - val_accuracy: 0.9510
Epoch 45/300
7/7 [==============================] - 0s 5ms/step - loss: 0.2841 - accuracy: 0.9014 - val_loss: 0.2113 - val_accuracy: 0.9510
Epoch 46/300
7/7 [==============================] - 0s 5ms/step - loss: 0.2802 - accuracy: 0.9085 - val_loss: 0.2064 - val_accuracy: 0.9580
Epoch 47/300
7/7 [==============================] - 0s 5ms/step - loss: 0.2654 - accuracy: 0.9296 - val_loss: 0.2002 - val_accuracy: 0.9510
Epoch 48/300
7/7 [==============================] - 0s 5ms/step - loss: 0.2359 - accuracy: 0.9507 - val_loss: 0.1947 - val_accuracy: 0.9510
Epoch 49/300
7/7 [==============================] - 0s 7ms/step - loss: 0.2643 - accuracy: 0.9319 - val_loss: 0.1897 - val_accuracy: 0.9510
Epoch 50/300
7/7 [==============================] - 0s 14ms/step - loss: 0.2561 - accuracy: 0.9484 - val_loss: 0.1847 - val_accuracy: 0.9580
Epoch 51/300
7/7 [==============================] - 0s 5ms/step - loss: 0.2469 - accuracy: 0.9413 - val_loss: 0.1795 - val_accuracy: 0.9580
Epoch 52/300
7/7 [==============================] - 0s 5ms/step - loss: 0.2457 - accuracy: 0.9319 - val_loss: 0.1749 - val_accuracy: 0.9580
Epoch 53/300
7/7 [==============================] - 0s 10ms/step - loss: 0.2352 - accuracy: 0.9484 - val_loss: 0.1688 - val_accuracy: 0.9650
Epoch 54/300
7/7 [==============================] - 0s 9ms/step - loss: 0.2281 - accuracy: 0.9531 - val_loss: 0.1640 - val_accuracy: 0.9650
Epoch 55/300
7/7 [==============================] - 0s 8ms/step - loss: 0.2214 - accuracy: 0.9366 - val_loss: 0.1601 - val_accuracy: 0.9580
Epoch 56/300
7/7 [==============================] - 0s 5ms/step - loss: 0.2102 - accuracy: 0.9460 - val_loss: 0.1561 - val_accuracy: 0.9650
Epoch 57/300
7/7 [==============================] - 0s 8ms/step - loss: 0.2290 - accuracy: 0.9437 - val_loss: 0.1524 - val_accuracy: 0.9650
Epoch 58/300
7/7 [==============================] - 0s 8ms/step - loss: 0.2064 - accuracy: 0.9460 - val_loss: 0.1505 - val_accuracy: 0.9650
Epoch 59/300
7/7 [==============================] - 0s 8ms/step - loss: 0.2277 - accuracy: 0.9319 - val_loss: 0.1476 - val_accuracy: 0.9720
Epoch 60/300
7/7 [==============================] - 0s 8ms/step - loss: 0.2055 - accuracy: 0.9484 - val_loss: 0.1453 - val_accuracy: 0.9650
Epoch 61/300
7/7 [==============================] - 0s 8ms/step - loss: 0.2120 - accuracy: 0.9390 - val_loss: 0.1438 - val_accuracy: 0.9790
Epoch 62/300
7/7 [==============================] - 0s 5ms/step - loss: 0.1981 - accuracy: 0.9531 - val_loss: 0.1410 - val_accuracy: 0.9720
Epoch 63/300
7/7 [==============================] - 0s 5ms/step - loss: 0.1874 - accuracy: 0.9577 - val_loss: 0.1394 - val_accuracy: 0.9720
Epoch 64/300
7/7 [==============================] - 0s 7ms/step - loss: 0.2003 - accuracy: 0.9531 - val_loss: 0.1371 - val_accuracy: 0.9720
Epoch 65/300
7/7 [==============================] - 0s 11ms/step - loss: 0.1883 - accuracy: 0.9343 - val_loss: 0.1344 - val_accuracy: 0.9790
Epoch 66/300
7/7 [==============================] - 0s 10ms/step - loss: 0.1805 - accuracy: 0.9577 - val_loss: 0.1329 - val_accuracy: 0.9790
Epoch 67/300
7/7 [==============================] - 0s 8ms/step - loss: 0.1770 - accuracy: 0.9507 - val_loss: 0.1310 - val_accuracy: 0.9860
Epoch 68/300
7/7 [==============================] - 0s 7ms/step - loss: 0.1812 - accuracy: 0.9531 - val_loss: 0.1301 - val_accuracy: 0.9720
Epoch 69/300
7/7 [==============================] - 0s 5ms/step - loss: 0.1757 - accuracy: 0.9601 - val_loss: 0.1275 - val_accuracy: 0.9790
Epoch 70/300
7/7 [==============================] - 0s 8ms/step - loss: 0.1928 - accuracy: 0.9437 - val_loss: 0.1259 - val_accuracy: 0.9790
Epoch 71/300
7/7 [==============================] - 0s 7ms/step - loss: 0.1673 - accuracy: 0.9624 - val_loss: 0.1246 - val_accuracy: 0.9860
Epoch 72/300
7/7 [==============================] - 0s 9ms/step - loss: 0.1673 - accuracy: 0.9577 - val_loss: 0.1245 - val_accuracy: 0.9790
Epoch 73/300
7/7 [==============================] - 0s 8ms/step - loss: 0.1766 - accuracy: 0.9554 - val_loss: 0.1218 - val_accuracy: 0.9860
Epoch 74/300
7/7 [==============================] - 0s 8ms/step - loss: 0.1754 - accuracy: 0.9484 - val_loss: 0.1216 - val_accuracy: 0.9790
Epoch 75/300
7/7 [==============================] - 0s 8ms/step - loss: 0.1671 - accuracy: 0.9531 - val_loss: 0.1201 - val_accuracy: 0.9860
Epoch 76/300
7/7 [==============================] - 0s 7ms/step - loss: 0.1679 - accuracy: 0.9507 - val_loss: 0.1237 - val_accuracy: 0.9720
Epoch 77/300
7/7 [==============================] - 0s 5ms/step - loss: 0.1651 - accuracy: 0.9601 - val_loss: 0.1177 - val_accuracy: 0.9790
Epoch 78/300
7/7 [==============================] - 0s 8ms/step - loss: 0.1712 - accuracy: 0.9554 - val_loss: 0.1172 - val_accuracy: 0.9790
Epoch 79/300
7/7 [==============================] - 0s 6ms/step - loss: 0.1546 - accuracy: 0.9554 - val_loss: 0.1155 - val_accuracy: 0.9860
Epoch 80/300
7/7 [==============================] - 0s 8ms/step - loss: 0.1524 - accuracy: 0.9577 - val_loss: 0.1139 - val_accuracy: 0.9860
Epoch 81/300
7/7 [==============================] - 0s 14ms/step - loss: 0.1532 - accuracy: 0.9601 - val_loss: 0.1132 - val_accuracy: 0.9860
Epoch 82/300
7/7 [==============================] - 0s 5ms/step - loss: 0.1533 - accuracy: 0.9671 - val_loss: 0.1116 - val_accuracy: 0.9860
Epoch 83/300
7/7 [==============================] - 0s 5ms/step - loss: 0.1602 - accuracy: 0.9624 - val_loss: 0.1107 - val_accuracy: 0.9790
Epoch 84/300
7/7 [==============================] - 0s 7ms/step - loss: 0.1470 - accuracy: 0.9601 - val_loss: 0.1103 - val_accuracy: 0.9790
Epoch 85/300
7/7 [==============================] - 0s 6ms/step - loss: 0.1485 - accuracy: 0.9648 - val_loss: 0.1090 - val_accuracy: 0.9860
Epoch 86/300
7/7 [==============================] - 0s 6ms/step - loss: 0.1418 - accuracy: 0.9695 - val_loss: 0.1084 - val_accuracy: 0.9860
Epoch 87/300
7/7 [==============================] - 0s 5ms/step - loss: 0.1392 - accuracy: 0.9718 - val_loss: 0.1086 - val_accuracy: 0.9860
Epoch 88/300
7/7 [==============================] - 0s 5ms/step - loss: 0.1342 - accuracy: 0.9718 - val_loss: 0.1063 - val_accuracy: 0.9860
Epoch 89/300
7/7 [==============================] - 0s 5ms/step - loss: 0.1533 - accuracy: 0.9671 - val_loss: 0.1056 - val_accuracy: 0.9860
Epoch 90/300
7/7 [==============================] - 0s 8ms/step - loss: 0.1501 - accuracy: 0.9624 - val_loss: 0.1050 - val_accuracy: 0.9860
Epoch 91/300
7/7 [==============================] - 0s 8ms/step - loss: 0.1448 - accuracy: 0.9624 - val_loss: 0.1043 - val_accuracy: 0.9860
Epoch 92/300
7/7 [==============================] - 0s 6ms/step - loss: 0.1362 - accuracy: 0.9624 - val_loss: 0.1038 - val_accuracy: 0.9860
Epoch 93/300
7/7 [==============================] - 0s 7ms/step - loss: 0.1307 - accuracy: 0.9765 - val_loss: 0.1031 - val_accuracy: 0.9860
Epoch 94/300
7/7 [==============================] - 0s 6ms/step - loss: 0.1358 - accuracy: 0.9765 - val_loss: 0.1026 - val_accuracy: 0.9860
Epoch 95/300
7/7 [==============================] - 0s 8ms/step - loss: 0.1298 - accuracy: 0.9695 - val_loss: 0.1017 - val_accuracy: 0.9860
Epoch 96/300
7/7 [==============================] - 0s 8ms/step - loss: 0.1362 - accuracy: 0.9648 - val_loss: 0.1011 - val_accuracy: 0.9860
Epoch 97/300
7/7 [==============================] - 0s 5ms/step - loss: 0.1462 - accuracy: 0.9577 - val_loss: 0.1011 - val_accuracy: 0.9860
Epoch 98/300
7/7 [==============================] - 0s 8ms/step - loss: 0.1526 - accuracy: 0.9507 - val_loss: 0.1014 - val_accuracy: 0.9860
Epoch 99/300
7/7 [==============================] - 0s 6ms/step - loss: 0.1428 - accuracy: 0.9577 - val_loss: 0.1000 - val_accuracy: 0.9860
Epoch 100/300
7/7 [==============================] - 0s 6ms/step - loss: 0.1276 - accuracy: 0.9718 - val_loss: 0.1001 - val_accuracy: 0.9860
Epoch 101/300
7/7 [==============================] - 0s 6ms/step - loss: 0.1342 - accuracy: 0.9695 - val_loss: 0.0989 - val_accuracy: 0.9860
Epoch 102/300
7/7 [==============================] - 0s 6ms/step - loss: 0.1366 - accuracy: 0.9648 - val_loss: 0.0997 - val_accuracy: 0.9860
Epoch 103/300
7/7 [==============================] - 0s 7ms/step - loss: 0.1369 - accuracy: 0.9671 - val_loss: 0.0980 - val_accuracy: 0.9860
Epoch 104/300
7/7 [==============================] - 0s 7ms/step - loss: 0.1347 - accuracy: 0.9671 - val_loss: 0.0981 - val_accuracy: 0.9860
Epoch 105/300
7/7 [==============================] - 0s 14ms/step - loss: 0.1288 - accuracy: 0.9648 - val_loss: 0.0974 - val_accuracy: 0.9860
Epoch 106/300
7/7 [==============================] - 0s 8ms/step - loss: 0.1194 - accuracy: 0.9718 - val_loss: 0.0964 - val_accuracy: 0.9860
Epoch 107/300
7/7 [==============================] - 0s 8ms/step - loss: 0.1234 - accuracy: 0.9789 - val_loss: 0.0962 - val_accuracy: 0.9860
Epoch 108/300
7/7 [==============================] - 0s 6ms/step - loss: 0.1285 - accuracy: 0.9695 - val_loss: 0.0954 - val_accuracy: 0.9860
Epoch 109/300
7/7 [==============================] - 0s 6ms/step - loss: 0.1319 - accuracy: 0.9624 - val_loss: 0.0951 - val_accuracy: 0.9930
Epoch 110/300
7/7 [==============================] - 0s 8ms/step - loss: 0.1164 - accuracy: 0.9718 - val_loss: 0.0953 - val_accuracy: 0.9860
Epoch 111/300
7/7 [==============================] - 0s 5ms/step - loss: 0.1226 - accuracy: 0.9718 - val_loss: 0.0937 - val_accuracy: 0.9860
Epoch 112/300
7/7 [==============================] - 0s 5ms/step - loss: 0.1241 - accuracy: 0.9718 - val_loss: 0.0932 - val_accuracy: 0.9860
Epoch 113/300
7/7 [==============================] - 0s 8ms/step - loss: 0.1345 - accuracy: 0.9624 - val_loss: 0.0926 - val_accuracy: 0.9860
Epoch 114/300
7/7 [==============================] - 0s 5ms/step - loss: 0.1313 - accuracy: 0.9718 - val_loss: 0.0924 - val_accuracy: 0.9860
Epoch 115/300
7/7 [==============================] - 0s 5ms/step - loss: 0.1205 - accuracy: 0.9742 - val_loss: 0.0921 - val_accuracy: 0.9860
Epoch 116/300
7/7 [==============================] - 0s 5ms/step - loss: 0.1349 - accuracy: 0.9577 - val_loss: 0.0920 - val_accuracy: 0.9930
Epoch 117/300
7/7 [==============================] - 0s 7ms/step - loss: 0.1120 - accuracy: 0.9765 - val_loss: 0.0915 - val_accuracy: 0.9860
Epoch 118/300
7/7 [==============================] - 0s 5ms/step - loss: 0.1283 - accuracy: 0.9648 - val_loss: 0.0912 - val_accuracy: 0.9860
Epoch 119/300
7/7 [==============================] - 0s 8ms/step - loss: 0.1177 - accuracy: 0.9742 - val_loss: 0.0909 - val_accuracy: 0.9860
Epoch 120/300
7/7 [==============================] - 0s 8ms/step - loss: 0.1303 - accuracy: 0.9648 - val_loss: 0.0908 - val_accuracy: 0.9930
Epoch 121/300
7/7 [==============================] - 0s 5ms/step - loss: 0.1273 - accuracy: 0.9718 - val_loss: 0.0907 - val_accuracy: 0.9860
Epoch 122/300
7/7 [==============================] - 0s 8ms/step - loss: 0.1219 - accuracy: 0.9765 - val_loss: 0.0911 - val_accuracy: 0.9860
Epoch 123/300
7/7 [==============================] - 0s 5ms/step - loss: 0.1252 - accuracy: 0.9695 - val_loss: 0.0904 - val_accuracy: 0.9860
Epoch 124/300
7/7 [==============================] - 0s 5ms/step - loss: 0.1187 - accuracy: 0.9695 - val_loss: 0.0901 - val_accuracy: 0.9930
Epoch 125/300
7/7 [==============================] - 0s 5ms/step - loss: 0.1337 - accuracy: 0.9624 - val_loss: 0.0922 - val_accuracy: 0.9860
Epoch 126/300
7/7 [==============================] - 0s 8ms/step - loss: 0.1182 - accuracy: 0.9718 - val_loss: 0.0894 - val_accuracy: 0.9860
Epoch 127/300
7/7 [==============================] - 0s 13ms/step - loss: 0.1162 - accuracy: 0.9695 - val_loss: 0.0889 - val_accuracy: 0.9860
Epoch 128/300
7/7 [==============================] - 0s 5ms/step - loss: 0.1141 - accuracy: 0.9695 - val_loss: 0.0887 - val_accuracy: 0.9930
Epoch 129/300
7/7 [==============================] - 0s 5ms/step - loss: 0.1076 - accuracy: 0.9765 - val_loss: 0.0887 - val_accuracy: 0.9860
Epoch 130/300
7/7 [==============================] - 0s 5ms/step - loss: 0.1173 - accuracy: 0.9601 - val_loss: 0.0876 - val_accuracy: 0.9860
Epoch 131/300
7/7 [==============================] - 0s 8ms/step - loss: 0.1161 - accuracy: 0.9742 - val_loss: 0.0872 - val_accuracy: 0.9860
Epoch 132/300
7/7 [==============================] - 0s 8ms/step - loss: 0.1095 - accuracy: 0.9789 - val_loss: 0.0876 - val_accuracy: 0.9860
Epoch 133/300
7/7 [==============================] - 0s 5ms/step - loss: 0.1091 - accuracy: 0.9765 - val_loss: 0.0871 - val_accuracy: 0.9930
Epoch 134/300
7/7 [==============================] - 0s 6ms/step - loss: 0.1074 - accuracy: 0.9765 - val_loss: 0.0863 - val_accuracy: 0.9860
Epoch 135/300
7/7 [==============================] - 0s 8ms/step - loss: 0.1069 - accuracy: 0.9765 - val_loss: 0.0858 - val_accuracy: 0.9930
Epoch 136/300
7/7 [==============================] - 0s 5ms/step - loss: 0.1166 - accuracy: 0.9695 - val_loss: 0.0859 - val_accuracy: 0.9930
Epoch 137/300
7/7 [==============================] - 0s 8ms/step - loss: 0.1131 - accuracy: 0.9765 - val_loss: 0.0855 - val_accuracy: 0.9860
Epoch 138/300
7/7 [==============================] - 0s 7ms/step - loss: 0.1132 - accuracy: 0.9742 - val_loss: 0.0852 - val_accuracy: 0.9930
Epoch 139/300
7/7 [==============================] - 0s 7ms/step - loss: 0.1047 - accuracy: 0.9718 - val_loss: 0.0850 - val_accuracy: 0.9860
Epoch 140/300
7/7 [==============================] - 0s 5ms/step - loss: 0.1047 - accuracy: 0.9789 - val_loss: 0.0847 - val_accuracy: 0.9930
Epoch 141/300
7/7 [==============================] - 0s 5ms/step - loss: 0.1091 - accuracy: 0.9742 - val_loss: 0.0844 - val_accuracy: 0.9930
Epoch 142/300
7/7 [==============================] - 0s 8ms/step - loss: 0.1055 - accuracy: 0.9765 - val_loss: 0.0840 - val_accuracy: 0.9930
Epoch 143/300
7/7 [==============================] - 0s 8ms/step - loss: 0.1003 - accuracy: 0.9718 - val_loss: 0.0839 - val_accuracy: 0.9860
Epoch 144/300
7/7 [==============================] - 0s 5ms/step - loss: 0.1015 - accuracy: 0.9789 - val_loss: 0.0835 - val_accuracy: 0.9860
Epoch 145/300
7/7 [==============================] - 0s 7ms/step - loss: 0.1025 - accuracy: 0.9765 - val_loss: 0.0831 - val_accuracy: 0.9860
Epoch 146/300
7/7 [==============================] - 0s 13ms/step - loss: 0.1071 - accuracy: 0.9765 - val_loss: 0.0831 - val_accuracy: 0.9930
Epoch 147/300
7/7 [==============================] - 0s 5ms/step - loss: 0.0941 - accuracy: 0.9812 - val_loss: 0.0826 - val_accuracy: 0.9860
Epoch 148/300
7/7 [==============================] - 0s 8ms/step - loss: 0.1040 - accuracy: 0.9718 - val_loss: 0.0823 - val_accuracy: 0.9930
Epoch 149/300
7/7 [==============================] - 0s 5ms/step - loss: 0.1050 - accuracy: 0.9765 - val_loss: 0.0821 - val_accuracy: 0.9860
Epoch 150/300
7/7 [==============================] - 0s 5ms/step - loss: 0.0975 - accuracy: 0.9789 - val_loss: 0.0819 - val_accuracy: 0.9930
Epoch 151/300
7/7 [==============================] - 0s 7ms/step - loss: 0.1030 - accuracy: 0.9789 - val_loss: 0.0820 - val_accuracy: 0.9860
Epoch 152/300
7/7 [==============================] - 0s 5ms/step - loss: 0.0989 - accuracy: 0.9789 - val_loss: 0.0816 - val_accuracy: 0.9930
Epoch 153/300
7/7 [==============================] - 0s 5ms/step - loss: 0.1119 - accuracy: 0.9671 - val_loss: 0.0810 - val_accuracy: 0.9930
Epoch 154/300
7/7 [==============================] - 0s 8ms/step - loss: 0.1087 - accuracy: 0.9718 - val_loss: 0.0808 - val_accuracy: 0.9930
Epoch 155/300
7/7 [==============================] - 0s 8ms/step - loss: 0.0973 - accuracy: 0.9789 - val_loss: 0.0823 - val_accuracy: 0.9860
Epoch 156/300
7/7 [==============================] - 0s 6ms/step - loss: 0.1063 - accuracy: 0.9765 - val_loss: 0.0809 - val_accuracy: 0.9860
Epoch 157/300
7/7 [==============================] - 0s 8ms/step - loss: 0.1009 - accuracy: 0.9742 - val_loss: 0.0801 - val_accuracy: 0.9930
Epoch 158/300
7/7 [==============================] - 0s 6ms/step - loss: 0.1017 - accuracy: 0.9742 - val_loss: 0.0802 - val_accuracy: 0.9860
Epoch 159/300
7/7 [==============================] - 0s 8ms/step - loss: 0.0980 - accuracy: 0.9812 - val_loss: 0.0797 - val_accuracy: 0.9860
Epoch 160/300
7/7 [==============================] - 0s 8ms/step - loss: 0.1055 - accuracy: 0.9742 - val_loss: 0.0799 - val_accuracy: 0.9860
Epoch 161/300
7/7 [==============================] - 0s 6ms/step - loss: 0.0979 - accuracy: 0.9718 - val_loss: 0.0796 - val_accuracy: 0.9860
Epoch 162/300
7/7 [==============================] - 0s 13ms/step - loss: 0.0962 - accuracy: 0.9789 - val_loss: 0.0794 - val_accuracy: 0.9860
Epoch 163/300
7/7 [==============================] - 0s 8ms/step - loss: 0.1011 - accuracy: 0.9742 - val_loss: 0.0794 - val_accuracy: 0.9860
Epoch 164/300
7/7 [==============================] - 0s 8ms/step - loss: 0.0918 - accuracy: 0.9765 - val_loss: 0.0782 - val_accuracy: 0.9930
Epoch 165/300
7/7 [==============================] - 0s 5ms/step - loss: 0.0972 - accuracy: 0.9718 - val_loss: 0.0785 - val_accuracy: 0.9860
Epoch 166/300
7/7 [==============================] - 0s 10ms/step - loss: 0.1014 - accuracy: 0.9718 - val_loss: 0.0781 - val_accuracy: 0.9860
Epoch 167/300
7/7 [==============================] - 0s 7ms/step - loss: 0.0898 - accuracy: 0.9765 - val_loss: 0.0775 - val_accuracy: 0.9930
Epoch 168/300
7/7 [==============================] - 0s 8ms/step - loss: 0.0962 - accuracy: 0.9742 - val_loss: 0.0772 - val_accuracy: 0.9930
Epoch 169/300
7/7 [==============================] - 0s 6ms/step - loss: 0.1023 - accuracy: 0.9718 - val_loss: 0.0770 - val_accuracy: 0.9930
Epoch 170/300
7/7 [==============================] - 0s 5ms/step - loss: 0.1067 - accuracy: 0.9742 - val_loss: 0.0770 - val_accuracy: 0.9930
Epoch 171/300
7/7 [==============================] - 0s 5ms/step - loss: 0.1093 - accuracy: 0.9742 - val_loss: 0.0776 - val_accuracy: 0.9860
Epoch 172/300
7/7 [==============================] - 0s 5ms/step - loss: 0.0994 - accuracy: 0.9789 - val_loss: 0.0771 - val_accuracy: 0.9930
Epoch 173/300
7/7 [==============================] - 0s 8ms/step - loss: 0.1057 - accuracy: 0.9671 - val_loss: 0.0789 - val_accuracy: 0.9860
Epoch 174/300
7/7 [==============================] - 0s 9ms/step - loss: 0.0984 - accuracy: 0.9812 - val_loss: 0.0795 - val_accuracy: 0.9860
Epoch 175/300
7/7 [==============================] - 0s 9ms/step - loss: 0.1009 - accuracy: 0.9742 - val_loss: 0.0806 - val_accuracy: 0.9790
Epoch 176/300
7/7 [==============================] - 0s 7ms/step - loss: 0.0995 - accuracy: 0.9718 - val_loss: 0.0784 - val_accuracy: 0.9860
Epoch 177/300
7/7 [==============================] - 0s 8ms/step - loss: 0.0965 - accuracy: 0.9695 - val_loss: 0.0785 - val_accuracy: 0.9860
Epoch 178/300
7/7 [==============================] - 0s 13ms/step - loss: 0.1003 - accuracy: 0.9671 - val_loss: 0.0761 - val_accuracy: 0.9930
Epoch 179/300
7/7 [==============================] - 0s 6ms/step - loss: 0.1125 - accuracy: 0.9695 - val_loss: 0.0843 - val_accuracy: 0.9790
Epoch 180/300
7/7 [==============================] - 0s 8ms/step - loss: 0.0987 - accuracy: 0.9601 - val_loss: 0.0763 - val_accuracy: 0.9930
Epoch 181/300
7/7 [==============================] - 0s 8ms/step - loss: 0.0823 - accuracy: 0.9789 - val_loss: 0.0770 - val_accuracy: 0.9860
Epoch 182/300
7/7 [==============================] - 0s 8ms/step - loss: 0.0927 - accuracy: 0.9789 - val_loss: 0.0757 - val_accuracy: 0.9930
Epoch 183/300
7/7 [==============================] - 0s 7ms/step - loss: 0.0928 - accuracy: 0.9789 - val_loss: 0.0758 - val_accuracy: 0.9930
Epoch 184/300
7/7 [==============================] - 0s 7ms/step - loss: 0.0992 - accuracy: 0.9789 - val_loss: 0.0752 - val_accuracy: 0.9930
Epoch 185/300
7/7 [==============================] - 0s 7ms/step - loss: 0.0918 - accuracy: 0.9836 - val_loss: 0.0751 - val_accuracy: 0.9930
Epoch 186/300
7/7 [==============================] - 0s 9ms/step - loss: 0.0940 - accuracy: 0.9742 - val_loss: 0.0752 - val_accuracy: 0.9930
Epoch 187/300
7/7 [==============================] - 0s 9ms/step - loss: 0.0862 - accuracy: 0.9765 - val_loss: 0.0749 - val_accuracy: 0.9930
Epoch 188/300
7/7 [==============================] - 0s 8ms/step - loss: 0.0892 - accuracy: 0.9812 - val_loss: 0.0751 - val_accuracy: 0.9860
Epoch 189/300
7/7 [==============================] - 0s 11ms/step - loss: 0.0906 - accuracy: 0.9765 - val_loss: 0.0746 - val_accuracy: 0.9930
Epoch 190/300
7/7 [==============================] - 0s 9ms/step - loss: 0.0841 - accuracy: 0.9836 - val_loss: 0.0742 - val_accuracy: 0.9930
Epoch 191/300
7/7 [==============================] - 0s 12ms/step - loss: 0.0938 - accuracy: 0.9742 - val_loss: 0.0745 - val_accuracy: 0.9860
Epoch 192/300
7/7 [==============================] - 0s 11ms/step - loss: 0.0940 - accuracy: 0.9789 - val_loss: 0.0746 - val_accuracy: 0.9860
Epoch 193/300
7/7 [==============================] - 0s 10ms/step - loss: 0.0890 - accuracy: 0.9765 - val_loss: 0.0746 - val_accuracy: 0.9860
Epoch 194/300
7/7 [==============================] - 0s 14ms/step - loss: 0.0912 - accuracy: 0.9765 - val_loss: 0.0740 - val_accuracy: 0.9930
Epoch 195/300
7/7 [==============================] - 0s 10ms/step - loss: 0.0842 - accuracy: 0.9789 - val_loss: 0.0739 - val_accuracy: 0.9860
Epoch 196/300
7/7 [==============================] - 0s 12ms/step - loss: 0.1003 - accuracy: 0.9695 - val_loss: 0.0735 - val_accuracy: 0.9930
Epoch 197/300
7/7 [==============================] - 0s 13ms/step - loss: 0.0971 - accuracy: 0.9718 - val_loss: 0.0741 - val_accuracy: 0.9860
Epoch 198/300
7/7 [==============================] - 0s 8ms/step - loss: 0.0976 - accuracy: 0.9765 - val_loss: 0.0739 - val_accuracy: 0.9930
Epoch 199/300
7/7 [==============================] - 0s 9ms/step - loss: 0.0911 - accuracy: 0.9742 - val_loss: 0.0733 - val_accuracy: 0.9930
Epoch 200/300
7/7 [==============================] - 0s 9ms/step - loss: 0.0958 - accuracy: 0.9695 - val_loss: 0.0787 - val_accuracy: 0.9790
Epoch 201/300
7/7 [==============================] - 0s 9ms/step - loss: 0.0908 - accuracy: 0.9765 - val_loss: 0.0737 - val_accuracy: 0.9860
Epoch 202/300
7/7 [==============================] - 0s 6ms/step - loss: 0.0977 - accuracy: 0.9742 - val_loss: 0.0733 - val_accuracy: 0.9860
Epoch 203/300
7/7 [==============================] - 0s 8ms/step - loss: 0.0800 - accuracy: 0.9836 - val_loss: 0.0728 - val_accuracy: 0.9930
Epoch 204/300
7/7 [==============================] - 0s 7ms/step - loss: 0.0882 - accuracy: 0.9789 - val_loss: 0.0727 - val_accuracy: 0.9930
Epoch 205/300
7/7 [==============================] - 0s 8ms/step - loss: 0.0898 - accuracy: 0.9812 - val_loss: 0.0733 - val_accuracy: 0.9860
Epoch 206/300
7/7 [==============================] - 0s 13ms/step - loss: 0.0904 - accuracy: 0.9812 - val_loss: 0.0727 - val_accuracy: 0.9930
Epoch 207/300
7/7 [==============================] - 0s 8ms/step - loss: 0.0915 - accuracy: 0.9836 - val_loss: 0.0728 - val_accuracy: 0.9930
Epoch 208/300
7/7 [==============================] - 0s 13ms/step - loss: 0.0923 - accuracy: 0.9812 - val_loss: 0.0725 - val_accuracy: 0.9930
Epoch 209/300
7/7 [==============================] - 0s 13ms/step - loss: 0.0855 - accuracy: 0.9765 - val_loss: 0.0726 - val_accuracy: 0.9930
Epoch 210/300
7/7 [==============================] - 0s 12ms/step - loss: 0.0835 - accuracy: 0.9789 - val_loss: 0.0730 - val_accuracy: 0.9860
Epoch 211/300
7/7 [==============================] - 0s 10ms/step - loss: 0.0956 - accuracy: 0.9765 - val_loss: 0.0730 - val_accuracy: 0.9930
Epoch 212/300
7/7 [==============================] - 0s 11ms/step - loss: 0.0819 - accuracy: 0.9812 - val_loss: 0.0731 - val_accuracy: 0.9860
Epoch 213/300
7/7 [==============================] - 0s 12ms/step - loss: 0.0910 - accuracy: 0.9671 - val_loss: 0.0721 - val_accuracy: 0.9930
Epoch 214/300
7/7 [==============================] - 0s 11ms/step - loss: 0.0818 - accuracy: 0.9836 - val_loss: 0.0722 - val_accuracy: 0.9930
Epoch 215/300
7/7 [==============================] - 0s 12ms/step - loss: 0.0819 - accuracy: 0.9859 - val_loss: 0.0717 - val_accuracy: 0.9930
Epoch 216/300
7/7 [==============================] - 0s 11ms/step - loss: 0.0825 - accuracy: 0.9836 - val_loss: 0.0716 - val_accuracy: 0.9930
Epoch 217/300
7/7 [==============================] - 0s 7ms/step - loss: 0.0907 - accuracy: 0.9836 - val_loss: 0.0714 - val_accuracy: 0.9930
Epoch 218/300
7/7 [==============================] - 0s 6ms/step - loss: 0.0891 - accuracy: 0.9789 - val_loss: 0.0729 - val_accuracy: 0.9790
Epoch 219/300
7/7 [==============================] - 0s 7ms/step - loss: 0.0926 - accuracy: 0.9742 - val_loss: 0.0720 - val_accuracy: 0.9860
Epoch 220/300
7/7 [==============================] - 0s 7ms/step - loss: 0.0894 - accuracy: 0.9765 - val_loss: 0.0720 - val_accuracy: 0.9860
Epoch 221/300
7/7 [==============================] - 0s 13ms/step - loss: 0.0802 - accuracy: 0.9812 - val_loss: 0.0714 - val_accuracy: 0.9930
Epoch 222/300
7/7 [==============================] - 0s 7ms/step - loss: 0.0847 - accuracy: 0.9812 - val_loss: 0.0723 - val_accuracy: 0.9790
Epoch 223/300
7/7 [==============================] - 0s 10ms/step - loss: 0.0922 - accuracy: 0.9789 - val_loss: 0.0714 - val_accuracy: 0.9860
Epoch 224/300
7/7 [==============================] - 0s 8ms/step - loss: 0.0908 - accuracy: 0.9812 - val_loss: 0.0736 - val_accuracy: 0.9860
Epoch 225/300
7/7 [==============================] - 0s 6ms/step - loss: 0.0829 - accuracy: 0.9765 - val_loss: 0.0708 - val_accuracy: 0.9930
Epoch 226/300
7/7 [==============================] - 0s 6ms/step - loss: 0.0886 - accuracy: 0.9812 - val_loss: 0.0710 - val_accuracy: 0.9860
Epoch 227/300
7/7 [==============================] - 0s 5ms/step - loss: 0.0874 - accuracy: 0.9742 - val_loss: 0.0706 - val_accuracy: 0.9930
Epoch 228/300
7/7 [==============================] - 0s 6ms/step - loss: 0.0818 - accuracy: 0.9742 - val_loss: 0.0708 - val_accuracy: 0.9930
Epoch 229/300
7/7 [==============================] - 0s 5ms/step - loss: 0.0890 - accuracy: 0.9765 - val_loss: 0.0708 - val_accuracy: 0.9930
Epoch 230/300
7/7 [==============================] - 0s 8ms/step - loss: 0.0865 - accuracy: 0.9765 - val_loss: 0.0706 - val_accuracy: 0.9930
Epoch 231/300
7/7 [==============================] - 0s 5ms/step - loss: 0.0851 - accuracy: 0.9789 - val_loss: 0.0706 - val_accuracy: 0.9930
Epoch 232/300
7/7 [==============================] - 0s 5ms/step - loss: 0.0917 - accuracy: 0.9695 - val_loss: 0.0705 - val_accuracy: 0.9930
Epoch 233/300
7/7 [==============================] - 0s 6ms/step - loss: 0.0808 - accuracy: 0.9836 - val_loss: 0.0725 - val_accuracy: 0.9790
Epoch 234/300
7/7 [==============================] - 0s 4ms/step - loss: 0.0850 - accuracy: 0.9765 - val_loss: 0.0701 - val_accuracy: 0.9930
Epoch 235/300
7/7 [==============================] - 0s 5ms/step - loss: 0.0759 - accuracy: 0.9836 - val_loss: 0.0707 - val_accuracy: 0.9860
Epoch 236/300
7/7 [==============================] - 0s 5ms/step - loss: 0.0800 - accuracy: 0.9883 - val_loss: 0.0700 - val_accuracy: 0.9860
Epoch 237/300
7/7 [==============================] - 0s 5ms/step - loss: 0.0781 - accuracy: 0.9859 - val_loss: 0.0698 - val_accuracy: 0.9930
Epoch 238/300
7/7 [==============================] - 0s 6ms/step - loss: 0.0830 - accuracy: 0.9812 - val_loss: 0.0696 - val_accuracy: 0.9930
Epoch 239/300
7/7 [==============================] - 0s 8ms/step - loss: 0.0705 - accuracy: 0.9812 - val_loss: 0.0702 - val_accuracy: 0.9790
Epoch 240/300
7/7 [==============================] - 0s 4ms/step - loss: 0.0712 - accuracy: 0.9812 - val_loss: 0.0693 - val_accuracy: 0.9930
Epoch 241/300
7/7 [==============================] - 0s 5ms/step - loss: 0.0831 - accuracy: 0.9812 - val_loss: 0.0694 - val_accuracy: 0.9860
Epoch 242/300
7/7 [==============================] - 0s 9ms/step - loss: 0.0859 - accuracy: 0.9836 - val_loss: 0.0689 - val_accuracy: 0.9930
Epoch 243/300
7/7 [==============================] - 0s 6ms/step - loss: 0.0781 - accuracy: 0.9789 - val_loss: 0.0690 - val_accuracy: 0.9930
Epoch 244/300
7/7 [==============================] - 0s 7ms/step - loss: 0.0859 - accuracy: 0.9765 - val_loss: 0.0686 - val_accuracy: 0.9930
Epoch 245/300
7/7 [==============================] - 0s 9ms/step - loss: 0.0803 - accuracy: 0.9859 - val_loss: 0.0688 - val_accuracy: 0.9930
Epoch 246/300
7/7 [==============================] - 0s 8ms/step - loss: 0.0667 - accuracy: 0.9836 - val_loss: 0.0688 - val_accuracy: 0.9860
Epoch 247/300
7/7 [==============================] - 0s 8ms/step - loss: 0.0720 - accuracy: 0.9836 - val_loss: 0.0682 - val_accuracy: 0.9930
Epoch 248/300
7/7 [==============================] - 0s 8ms/step - loss: 0.0659 - accuracy: 0.9859 - val_loss: 0.0686 - val_accuracy: 0.9860
Epoch 249/300
7/7 [==============================] - 0s 6ms/step - loss: 0.0848 - accuracy: 0.9812 - val_loss: 0.0720 - val_accuracy: 0.9790
Epoch 250/300
7/7 [==============================] - 0s 7ms/step - loss: 0.0794 - accuracy: 0.9812 - val_loss: 0.0682 - val_accuracy: 0.9930
Epoch 251/300
7/7 [==============================] - 0s 7ms/step - loss: 0.0730 - accuracy: 0.9789 - val_loss: 0.0701 - val_accuracy: 0.9860
Epoch 252/300
7/7 [==============================] - 0s 8ms/step - loss: 0.0739 - accuracy: 0.9859 - val_loss: 0.0731 - val_accuracy: 0.9790
Epoch 253/300
7/7 [==============================] - 0s 8ms/step - loss: 0.0814 - accuracy: 0.9789 - val_loss: 0.0689 - val_accuracy: 0.9790
Epoch 254/300
7/7 [==============================] - 0s 7ms/step - loss: 0.0681 - accuracy: 0.9859 - val_loss: 0.0679 - val_accuracy: 0.9930
Epoch 255/300
7/7 [==============================] - 0s 13ms/step - loss: 0.0710 - accuracy: 0.9812 - val_loss: 0.0688 - val_accuracy: 0.9860
Epoch 256/300
7/7 [==============================] - 0s 8ms/step - loss: 0.0794 - accuracy: 0.9789 - val_loss: 0.0679 - val_accuracy: 0.9930
Epoch 257/300
7/7 [==============================] - 0s 10ms/step - loss: 0.0840 - accuracy: 0.9765 - val_loss: 0.0685 - val_accuracy: 0.9860
Epoch 258/300
7/7 [==============================] - 0s 6ms/step - loss: 0.0776 - accuracy: 0.9789 - val_loss: 0.0728 - val_accuracy: 0.9790
Epoch 259/300
7/7 [==============================] - 0s 8ms/step - loss: 0.0828 - accuracy: 0.9789 - val_loss: 0.0688 - val_accuracy: 0.9860
Epoch 260/300
7/7 [==============================] - 0s 8ms/step - loss: 0.0832 - accuracy: 0.9789 - val_loss: 0.0684 - val_accuracy: 0.9860
Epoch 261/300
7/7 [==============================] - 0s 8ms/step - loss: 0.0819 - accuracy: 0.9812 - val_loss: 0.0703 - val_accuracy: 0.9790
Epoch 262/300
7/7 [==============================] - 0s 8ms/step - loss: 0.0883 - accuracy: 0.9742 - val_loss: 0.0681 - val_accuracy: 0.9930
Epoch 263/300
7/7 [==============================] - 0s 6ms/step - loss: 0.0771 - accuracy: 0.9906 - val_loss: 0.0683 - val_accuracy: 0.9930
Epoch 264/300
7/7 [==============================] - 0s 8ms/step - loss: 0.0885 - accuracy: 0.9695 - val_loss: 0.0703 - val_accuracy: 0.9790
Epoch 265/300
7/7 [==============================] - 0s 7ms/step - loss: 0.0706 - accuracy: 0.9812 - val_loss: 0.0685 - val_accuracy: 0.9860
Epoch 266/300
7/7 [==============================] - 0s 8ms/step - loss: 0.0753 - accuracy: 0.9836 - val_loss: 0.0681 - val_accuracy: 0.9930
Epoch 267/300
7/7 [==============================] - 0s 8ms/step - loss: 0.0672 - accuracy: 0.9883 - val_loss: 0.0685 - val_accuracy: 0.9860
Epoch 268/300
7/7 [==============================] - 0s 8ms/step - loss: 0.0676 - accuracy: 0.9859 - val_loss: 0.0684 - val_accuracy: 0.9860
Epoch 269/300
7/7 [==============================] - 0s 6ms/step - loss: 0.0727 - accuracy: 0.9789 - val_loss: 0.0677 - val_accuracy: 0.9930
Epoch 270/300
7/7 [==============================] - 0s 6ms/step - loss: 0.0776 - accuracy: 0.9836 - val_loss: 0.0676 - val_accuracy: 0.9930
Epoch 271/300
7/7 [==============================] - 0s 14ms/step - loss: 0.0719 - accuracy: 0.9906 - val_loss: 0.0693 - val_accuracy: 0.9790
Epoch 272/300
7/7 [==============================] - 0s 7ms/step - loss: 0.0724 - accuracy: 0.9859 - val_loss: 0.0675 - val_accuracy: 0.9930
Epoch 273/300
7/7 [==============================] - 0s 11ms/step - loss: 0.0714 - accuracy: 0.9836 - val_loss: 0.0678 - val_accuracy: 0.9930
Epoch 274/300
7/7 [==============================] - 0s 8ms/step - loss: 0.0761 - accuracy: 0.9812 - val_loss: 0.0681 - val_accuracy: 0.9860
Epoch 275/300
7/7 [==============================] - 0s 7ms/step - loss: 0.0816 - accuracy: 0.9836 - val_loss: 0.0686 - val_accuracy: 0.9790
Epoch 276/300
7/7 [==============================] - 0s 7ms/step - loss: 0.0753 - accuracy: 0.9836 - val_loss: 0.0677 - val_accuracy: 0.9860
Epoch 277/300
7/7 [==============================] - 0s 6ms/step - loss: 0.0792 - accuracy: 0.9789 - val_loss: 0.0671 - val_accuracy: 0.9930
Epoch 278/300
7/7 [==============================] - 0s 6ms/step - loss: 0.0757 - accuracy: 0.9789 - val_loss: 0.0685 - val_accuracy: 0.9790
Epoch 279/300
7/7 [==============================] - 0s 8ms/step - loss: 0.0697 - accuracy: 0.9859 - val_loss: 0.0702 - val_accuracy: 0.9790
Epoch 280/300
7/7 [==============================] - 0s 7ms/step - loss: 0.0787 - accuracy: 0.9765 - val_loss: 0.0671 - val_accuracy: 0.9930
Epoch 281/300
7/7 [==============================] - 0s 7ms/step - loss: 0.0746 - accuracy: 0.9836 - val_loss: 0.0676 - val_accuracy: 0.9860
Epoch 282/300
7/7 [==============================] - 0s 6ms/step - loss: 0.0744 - accuracy: 0.9859 - val_loss: 0.0706 - val_accuracy: 0.9790
Epoch 283/300
7/7 [==============================] - 0s 5ms/step - loss: 0.0710 - accuracy: 0.9883 - val_loss: 0.0680 - val_accuracy: 0.9790
Epoch 284/300
7/7 [==============================] - 0s 8ms/step - loss: 0.0702 - accuracy: 0.9859 - val_loss: 0.0670 - val_accuracy: 0.9860
Epoch 285/300
7/7 [==============================] - 0s 8ms/step - loss: 0.0750 - accuracy: 0.9859 - val_loss: 0.0669 - val_accuracy: 0.9930
Epoch 286/300
7/7 [==============================] - 0s 7ms/step - loss: 0.0724 - accuracy: 0.9812 - val_loss: 0.0681 - val_accuracy: 0.9790
Epoch 287/300
7/7 [==============================] - 0s 12ms/step - loss: 0.0717 - accuracy: 0.9859 - val_loss: 0.0680 - val_accuracy: 0.9790
Epoch 288/300
7/7 [==============================] - 0s 5ms/step - loss: 0.0764 - accuracy: 0.9812 - val_loss: 0.0669 - val_accuracy: 0.9930
Epoch 289/300
7/7 [==============================] - 0s 7ms/step - loss: 0.0769 - accuracy: 0.9812 - val_loss: 0.0669 - val_accuracy: 0.9930
Epoch 290/300
7/7 [==============================] - 0s 6ms/step - loss: 0.0872 - accuracy: 0.9671 - val_loss: 0.0703 - val_accuracy: 0.9790
Epoch 291/300
7/7 [==============================] - 0s 8ms/step - loss: 0.0771 - accuracy: 0.9859 - val_loss: 0.0675 - val_accuracy: 0.9860
Epoch 292/300
7/7 [==============================] - 0s 8ms/step - loss: 0.0739 - accuracy: 0.9859 - val_loss: 0.0669 - val_accuracy: 0.9860
Epoch 293/300
7/7 [==============================] - 0s 5ms/step - loss: 0.0686 - accuracy: 0.9836 - val_loss: 0.0695 - val_accuracy: 0.9790
Epoch 294/300
7/7 [==============================] - 0s 5ms/step - loss: 0.0739 - accuracy: 0.9789 - val_loss: 0.0668 - val_accuracy: 0.9930
Epoch 295/300
7/7 [==============================] - 0s 8ms/step - loss: 0.0739 - accuracy: 0.9836 - val_loss: 0.0675 - val_accuracy: 0.9860
Epoch 296/300
7/7 [==============================] - 0s 7ms/step - loss: 0.0697 - accuracy: 0.9859 - val_loss: 0.0667 - val_accuracy: 0.9930
Epoch 297/300
7/7 [==============================] - 0s 7ms/step - loss: 0.0739 - accuracy: 0.9883 - val_loss: 0.0672 - val_accuracy: 0.9860
Epoch 298/300
7/7 [==============================] - 0s 8ms/step - loss: 0.0746 - accuracy: 0.9789 - val_loss: 0.0688 - val_accuracy: 0.9790
Epoch 299/300
7/7 [==============================] - 0s 7ms/step - loss: 0.0791 - accuracy: 0.9789 - val_loss: 0.0679 - val_accuracy: 0.9860
Epoch 300/300
7/7 [==============================] - 0s 8ms/step - loss: 0.0691 - accuracy: 0.9836 - val_loss: 0.0668 - val_accuracy: 0.9860

Plotting loss and accuracy

  • We can observe that after 300 epochs the model learns to classify malignant (0) versus benign (1) tumors correctly
  • The validation loss is lower than the training loss, most likely because dropout (and L2) regularization is active only during training: the training loss is computed with dropout applied, while validation uses the full network (a quick check is sketched just below)
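One quick way to check this explanation is to re-evaluate the trained model on the training set: evaluate() runs in inference mode, so dropout is disabled and the measured training loss should come out lower than the dropout-on losses logged during fitting, typically close to the validation loss. A minimal sketch, reusing the arrays defined earlier:

# Sketch: training loss measured without dropout (inference mode) vs. validation loss
train_loss, train_acc = model_1.evaluate(X_train_shuffled, y_train_shuffled, verbose=0)
val_loss, val_acc = model_1.evaluate(X_test, y_test, verbose=0)
print(f"train loss (no dropout): {train_loss:.4f}  val loss: {val_loss:.4f}")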
In [72]:
plt.plot(history.history['loss'], label='Training Loss')
plt.plot(history.history['val_loss'], label='Validation Loss')
plt.title('Model Loss')
plt.xlabel('Epoch')
plt.ylabel('Loss')
plt.legend()
plt.show()
In [73]:
plt.plot(history.history['accuracy'], label='Training Acc')
plt.plot(history.history['val_accuracy'], label='Validation Acc')
plt.title('Model Acc')
plt.xlabel('Epoch')
plt.ylabel('Acc')
plt.legend()
plt.show()

Inspecting some accuracy values to better understand the model's results and convergence

In [74]:
history.history['accuracy'][0:15]
Out[74]:
[0.5492957830429077,
 0.6150234937667847,
 0.6361502408981323,
 0.67136150598526,
 0.6924882531166077,
 0.7230046987533569,
 0.737089216709137,
 0.7746478915214539,
 0.7910798192024231,
 0.7723004817962646,
 0.7840375304222107,
 0.7910798192024231,
 0.8098591566085815,
 0.8262910842895508,
 0.7957746386528015]
In [75]:
history.history['accuracy'][285:299]
Out[75]:
[0.9812206625938416,
 0.98591548204422,
 0.9812206625938416,
 0.9812206625938416,
 0.9671361446380615,
 0.98591548204422,
 0.98591548204422,
 0.9835680723190308,
 0.9788732528686523,
 0.9835680723190308,
 0.98591548204422,
 0.9882628917694092,
 0.9788732528686523,
 0.9788732528686523]

Comparing the model's predictions with the test labels for a visual understanding of the results

In [76]:
preds = model_1.predict(X_test)
y_pred_classes = (preds > 0.5).astype(int) # turning the predictions (probabilities) into classes (0 or 1) with a 0.5 threshold
5/5 [==============================] - 0s 1ms/step
In [77]:
preds[0:10]
Out[77]:
array([[9.9982846e-01],
       [9.9974132e-01],
       [9.9156398e-01],
       [9.9484193e-01],
       [9.9979740e-01],
       [6.2453331e-08],
       [1.3036483e-06],
       [9.9852651e-01],
       [9.8689264e-01],
       [9.9840647e-01]], dtype=float32)
In [78]:
y_pred_classes[0:10]
Out[78]:
array([[1],
       [1],
       [1],
       [1],
       [1],
       [0],
       [0],
       [1],
       [1],
       [1]])
In [79]:
y_test[0:10]
Out[79]:
target
63 1
525 1
500 1
292 1
46 1
108 0
323 0
386 1
377 1
467 1
In [80]:
conf_matrix = confusion_matrix(y_test, y_pred_classes)
In [81]:
plt.figure(figsize=(8, 6))
sns.heatmap(conf_matrix, annot=True, fmt='d', cmap='Reds', xticklabels=[0,1], yticklabels=[0,1])
plt.xlabel('Predicted Label')
plt.ylabel('True Label')
plt.title('Confusion Matrix')
plt.show()
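Accuracy alone can hide which kind of error the model makes, which matters here because the two classes have different clinical costs. A minimal sketch using scikit-learn's classification_report to get per-class precision, recall and F1 on the test set (remember that 0 = malignant and 1 = benign):

# Sketch: per-class precision / recall / F1 on the test set
from sklearn.metrics import classification_report

print(classification_report(y_test, y_pred_classes, target_names=['malignant', 'benign']))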
In [ ]: