Cardiovascular diseases are the most common cause of death globally, taking an estimated 17.9 million lives each year — about 31% of all deaths worldwide. Heart failure is a common event caused by cardiovascular disease. It is characterized by the heart's inability to pump an adequate supply of blood to the body; without sufficient blood flow, all major body functions are disrupted. Heart failure is a condition, or a collection of symptoms, that weakens the heart.
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
from sklearn import preprocessing
from sklearn.preprocessing import StandardScaler
from sklearn.model_selection import train_test_split
import seaborn as sns
from keras.layers import Dense, BatchNormalization, Dropout, LSTM
from keras.models import Sequential
from keras.utils import to_categorical
from keras import callbacks
from sklearn.metrics import precision_score, recall_score, confusion_matrix, classification_report, accuracy_score, f1_score
Using TensorFlow backend.
# Load the heart-failure clinical-records dataset.
# A raw string is used for the Windows path: in a plain string a backslash
# followed by e.g. "t" or "n" would silently become an escape sequence and
# break the path ("\h" happens to be safe today, but only by accident).
data = pd.read_csv(r"Desktop\heart_failure_clinical_records_dataset.csv")
data.head()
| age | anaemia | creatinine_phosphokinase | diabetes | ejection_fraction | high_blood_pressure | platelets | serum_creatinine | serum_sodium | sex | smoking | time | DEATH_EVENT | |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
| 0 | 75.0 | 0 | 582 | 0 | 20 | 1 | 265000.00 | 1.9 | 130 | 1 | 0 | 4 | 1 |
| 1 | 55.0 | 0 | 7861 | 0 | 38 | 0 | 263358.03 | 1.1 | 136 | 1 | 0 | 6 | 1 |
| 2 | 65.0 | 0 | 146 | 0 | 20 | 0 | 162000.00 | 1.3 | 129 | 1 | 1 | 7 | 1 |
| 3 | 50.0 | 1 | 111 | 0 | 20 | 0 | 210000.00 | 1.9 | 137 | 1 | 0 | 7 | 1 |
| 4 | 65.0 | 1 | 160 | 1 | 20 | 0 | 327000.00 | 2.7 | 116 | 0 | 0 | 8 | 1 |
# Dtypes and non-null counts — 299 rows, 13 columns, no missing values.
data.info()
<class 'pandas.core.frame.DataFrame'> RangeIndex: 299 entries, 0 to 298 Data columns (total 13 columns): # Column Non-Null Count Dtype --- ------ -------------- ----- 0 age 299 non-null float64 1 anaemia 299 non-null int64 2 creatinine_phosphokinase 299 non-null int64 3 diabetes 299 non-null int64 4 ejection_fraction 299 non-null int64 5 high_blood_pressure 299 non-null int64 6 platelets 299 non-null float64 7 serum_creatinine 299 non-null float64 8 serum_sodium 299 non-null int64 9 sex 299 non-null int64 10 smoking 299 non-null int64 11 time 299 non-null int64 12 DEATH_EVENT 299 non-null int64 dtypes: float64(3), int64(10) memory usage: 30.5 KB
About the data: 299 patient records with 12 clinical features (3 float, 9 integer/binary) and a binary target, DEATH_EVENT; there are no missing values.
# Start by inspecting the target: is DEATH_EVENT balanced or skewed?
# (`cols` is reused as the two-class palette throughout the notebook.)
cols = ["#000080", "#FF0000"]
sns.countplot(x=data["DEATH_EVENT"], palette=cols)
<AxesSubplot:xlabel='DEATH_EVENT', ylabel='count'>
# Pairwise correlations between all columns, rendered as an annotated heatmap.
corrmat = data.corr()
cmap = sns.diverging_palette(250, 150, s=50, l=65, n=9)
fig, ax = plt.subplots(figsize=(15, 15))
sns.heatmap(corrmat, cmap=cmap, annot=True, square=True, ax=ax)
<AxesSubplot:>
# Age distribution, split by outcome.
# Fixes: the axes variable was misleadingly named `Days_of_weeks` (leftover
# from another notebook), and the call mixed `x=Series` with `data=` — use
# column names with `data=` consistently.
plt.figure(figsize=(20, 12))
age_ax = sns.countplot(x="age", data=data, hue="DEATH_EVENT", palette=cols)
age_ax.set_title("Distribution Of Age")
ERROR! Session/line number was not unique in database. History logging moved to new session 47
Text(0.5, 1.0, 'Distribution Of Age')
# Boxen plot overlaid with a swarm plot for each continuous (non-binary)
# feature, split by outcome.
feature = [
    "age",
    "creatinine_phosphokinase",
    "ejection_fraction",
    "platelets",
    "serum_creatinine",
    "serum_sodium",
    "time",
]
for col in feature:
    plt.figure(figsize=(10, 10))
    sns.swarmplot(x=data["DEATH_EVENT"], y=data[col], marker="o", s=4.5,
                  color="Black", alpha=0.5)
    sns.boxenplot(x=data["DEATH_EVENT"], y=data[col], palette=cols)
    plt.show()
# Joint density of follow-up time vs. age, coloured by outcome.
sns.kdeplot(data=data, x="time", y="age", hue="DEATH_EVENT", palette=cols)
<AxesSubplot:xlabel='time', ylabel='age'>
# Summary statistics, transposed so each row is one feature.
data.describe().T
| count | mean | std | min | 25% | 50% | 75% | max | |
|---|---|---|---|---|---|---|---|---|
| age | 299.0 | 60.833893 | 11.894809 | 40.0 | 51.0 | 60.0 | 70.0 | 95.0 |
| anaemia | 299.0 | 0.431438 | 0.496107 | 0.0 | 0.0 | 0.0 | 1.0 | 1.0 |
| creatinine_phosphokinase | 299.0 | 581.839465 | 970.287881 | 23.0 | 116.5 | 250.0 | 582.0 | 7861.0 |
| diabetes | 299.0 | 0.418060 | 0.494067 | 0.0 | 0.0 | 0.0 | 1.0 | 1.0 |
| ejection_fraction | 299.0 | 38.083612 | 11.834841 | 14.0 | 30.0 | 38.0 | 45.0 | 80.0 |
| high_blood_pressure | 299.0 | 0.351171 | 0.478136 | 0.0 | 0.0 | 0.0 | 1.0 | 1.0 |
| platelets | 299.0 | 263358.029264 | 97804.236869 | 25100.0 | 212500.0 | 262000.0 | 303500.0 | 850000.0 |
| serum_creatinine | 299.0 | 1.393880 | 1.034510 | 0.5 | 0.9 | 1.1 | 1.4 | 9.4 |
| serum_sodium | 299.0 | 136.625418 | 4.412477 | 113.0 | 134.0 | 137.0 | 140.0 | 148.0 |
| sex | 299.0 | 0.648829 | 0.478136 | 0.0 | 0.0 | 1.0 | 1.0 | 1.0 |
| smoking | 299.0 | 0.321070 | 0.467670 | 0.0 | 0.0 | 0.0 | 1.0 | 1.0 |
| time | 299.0 | 130.260870 | 77.614208 | 4.0 | 73.0 | 115.0 | 203.0 | 285.0 |
| DEATH_EVENT | 299.0 | 0.321070 | 0.467670 | 0.0 | 0.0 | 0.0 | 1.0 | 1.0 |
# Separate the predictors (X) from the binary target (y).
y = data["DEATH_EVENT"]
X = data.drop(["DEATH_EVENT"], axis=1)
ERROR! Session/line number was not unique in database. History logging moved to new session 56
#Set up a standard scaler for the features
# NOTE(review): the scaler is fit on the FULL dataset here, before the
# train/test split below — test-set statistics therefore leak into the
# training features. For an honest evaluation, fit on X_train only and
# transform X_test with the fitted scaler; confirm before reporting results.
col_names = list(X.columns)
s_scaler = preprocessing.StandardScaler()
X_df = s_scaler.fit_transform(X)
X_df = pd.DataFrame(X_df, columns=col_names)
X_df.describe().T
| count | mean | std | min | 25% | 50% | 75% | max | |
|---|---|---|---|---|---|---|---|---|
| age | 299.0 | 5.703353e-16 | 1.001676 | -1.754448 | -0.828124 | -0.070223 | 0.771889 | 2.877170 |
| anaemia | 299.0 | 1.009969e-16 | 1.001676 | -0.871105 | -0.871105 | -0.871105 | 1.147968 | 1.147968 |
| creatinine_phosphokinase | 299.0 | 0.000000e+00 | 1.001676 | -0.576918 | -0.480393 | -0.342574 | 0.000166 | 7.514640 |
| diabetes | 299.0 | 9.060014e-17 | 1.001676 | -0.847579 | -0.847579 | -0.847579 | 1.179830 | 1.179830 |
| ejection_fraction | 299.0 | -3.267546e-17 | 1.001676 | -2.038387 | -0.684180 | -0.007077 | 0.585389 | 3.547716 |
| high_blood_pressure | 299.0 | 0.000000e+00 | 1.001676 | -0.735688 | -0.735688 | -0.735688 | 1.359272 | 1.359272 |
| platelets | 299.0 | 7.723291e-17 | 1.001676 | -2.440155 | -0.520870 | -0.013908 | 0.411120 | 6.008180 |
| serum_creatinine | 299.0 | 1.425838e-16 | 1.001676 | -0.865509 | -0.478205 | -0.284552 | 0.005926 | 7.752020 |
| serum_sodium | 299.0 | -8.673849e-16 | 1.001676 | -5.363206 | -0.595996 | 0.085034 | 0.766064 | 2.582144 |
| sex | 299.0 | -8.911489e-18 | 1.001676 | -1.359272 | -1.359272 | 0.735688 | 0.735688 | 0.735688 |
| smoking | 299.0 | -1.188199e-17 | 1.001676 | -0.687682 | -0.687682 | -0.687682 | 1.454161 | 1.454161 |
| time | 299.0 | -1.901118e-16 | 1.001676 | -1.629502 | -0.739000 | -0.196954 | 0.938759 | 1.997038 |
# Visualise the standardised features in one figure to spot remaining outliers.
colours = ["#FF0000", "#FFFF00", "#00FF00", "#00FFFF", "#FF00FF"]
plt.figure(figsize=(20, 10))
sns.boxenplot(data=X_df, palette=colours)
plt.xticks(rotation=90)
plt.show()
# Hold out 25% of the rows as a test set.
# stratify=y keeps the ~32% positive-class ratio identical in both splits,
# which matters for this imbalanced target (plain random splitting can leave
# the small test set with a noticeably different death rate).
X_train, X_test, y_train, y_test = train_test_split(
    X_df, y, test_size=0.25, random_state=7, stratify=y)
# Halt training once the monitored metric (val_loss by default) fails to
# improve by at least 0.001 for 20 consecutive epochs, then restore the
# weights from the best epoch seen.
early_stopping = callbacks.EarlyStopping(
    patience=20,
    min_delta=0.001,
    restore_best_weights=True,
)
# Define the ANN: 12 inputs -> 16 -> 8 -> 4 -> 1 (sigmoid output for the
# binary DEATH_EVENT target), with dropout after the two middle hidden
# layers for regularisation.
model = Sequential([
    Dense(units=16, kernel_initializer="uniform", activation="relu",
          input_dim=12),
    Dense(units=8, kernel_initializer="uniform", activation="relu"),
    Dropout(0.25),
    Dense(units=4, kernel_initializer="uniform", activation="relu"),
    Dropout(0.5),
    Dense(units=1, kernel_initializer="uniform", activation="sigmoid"),
])
# Compile with Adam and binary cross-entropy (standard pairing for a
# single-sigmoid binary classifier), tracking accuracy during training.
# (Removed `from keras.optimizers import SGD` — it was never used; the
# optimiser is Adam.)
model.compile(optimizer = 'adam', loss = 'binary_crossentropy', metrics = ['accuracy'])
# Fit for up to 500 epochs; EarlyStopping normally halts much sooner.
# A further 20% of the training rows are carved off for validation.
history = model.fit(
    X_train,
    y_train,
    validation_split=0.2,
    batch_size=32,
    epochs=500,
    callbacks=[early_stopping],
)
Train on 179 samples, validate on 45 samples Epoch 1/500 179/179 [==============================] - 0s 1ms/step - loss: 0.6928 - accuracy: 0.6425 - val_loss: 0.6922 - val_accuracy: 0.6667 Epoch 2/500 179/179 [==============================] - 0s 50us/step - loss: 0.6920 - accuracy: 0.6480 - val_loss: 0.6912 - val_accuracy: 0.6667 Epoch 3/500 179/179 [==============================] - 0s 50us/step - loss: 0.6911 - accuracy: 0.6480 - val_loss: 0.6902 - val_accuracy: 0.6667 Epoch 4/500 179/179 [==============================] - 0s 45us/step - loss: 0.6901 - accuracy: 0.6480 - val_loss: 0.6892 - val_accuracy: 0.6667 Epoch 5/500 179/179 [==============================] - 0s 45us/step - loss: 0.6893 - accuracy: 0.6480 - val_loss: 0.6880 - val_accuracy: 0.6667 Epoch 6/500 179/179 [==============================] - 0s 45us/step - loss: 0.6883 - accuracy: 0.6480 - val_loss: 0.6869 - val_accuracy: 0.6667 Epoch 7/500 179/179 [==============================] - 0s 45us/step - loss: 0.6875 - accuracy: 0.6480 - val_loss: 0.6856 - val_accuracy: 0.6667 Epoch 8/500 179/179 [==============================] - 0s 45us/step - loss: 0.6858 - accuracy: 0.6480 - val_loss: 0.6843 - val_accuracy: 0.6667 Epoch 9/500 179/179 [==============================] - 0s 45us/step - loss: 0.6846 - accuracy: 0.6480 - val_loss: 0.6826 - val_accuracy: 0.6667 Epoch 10/500 179/179 [==============================] - 0s 50us/step - loss: 0.6834 - accuracy: 0.6480 - val_loss: 0.6806 - val_accuracy: 0.6667 Epoch 11/500 179/179 [==============================] - 0s 45us/step - loss: 0.6816 - accuracy: 0.6480 - val_loss: 0.6786 - val_accuracy: 0.6667 Epoch 12/500 179/179 [==============================] - 0s 50us/step - loss: 0.6792 - accuracy: 0.6480 - val_loss: 0.6758 - val_accuracy: 0.6667 Epoch 13/500 179/179 [==============================] - 0s 45us/step - loss: 0.6760 - accuracy: 0.6480 - val_loss: 0.6722 - val_accuracy: 0.6667 Epoch 14/500 179/179 [==============================] - 0s 45us/step - loss: 
0.6719 - accuracy: 0.6480 - val_loss: 0.6678 - val_accuracy: 0.6667 Epoch 15/500 179/179 [==============================] - 0s 50us/step - loss: 0.6687 - accuracy: 0.6480 - val_loss: 0.6622 - val_accuracy: 0.6667 Epoch 16/500 179/179 [==============================] - 0s 45us/step - loss: 0.6641 - accuracy: 0.6480 - val_loss: 0.6555 - val_accuracy: 0.6667 Epoch 17/500 179/179 [==============================] - 0s 45us/step - loss: 0.6527 - accuracy: 0.6480 - val_loss: 0.6474 - val_accuracy: 0.6667 Epoch 18/500 179/179 [==============================] - 0s 50us/step - loss: 0.6406 - accuracy: 0.6480 - val_loss: 0.6375 - val_accuracy: 0.6667 Epoch 19/500 179/179 [==============================] - 0s 50us/step - loss: 0.6358 - accuracy: 0.6480 - val_loss: 0.6265 - val_accuracy: 0.6667 Epoch 20/500 179/179 [==============================] - 0s 45us/step - loss: 0.6237 - accuracy: 0.6480 - val_loss: 0.6146 - val_accuracy: 0.6667 Epoch 21/500 179/179 [==============================] - 0s 50us/step - loss: 0.6136 - accuracy: 0.6480 - val_loss: 0.6020 - val_accuracy: 0.6667 Epoch 22/500 179/179 [==============================] - 0s 50us/step - loss: 0.6029 - accuracy: 0.6480 - val_loss: 0.5891 - val_accuracy: 0.6667 Epoch 23/500 179/179 [==============================] - 0s 45us/step - loss: 0.5934 - accuracy: 0.6480 - val_loss: 0.5770 - val_accuracy: 0.6667 Epoch 24/500 179/179 [==============================] - 0s 50us/step - loss: 0.5717 - accuracy: 0.6480 - val_loss: 0.5653 - val_accuracy: 0.6667 Epoch 25/500 179/179 [==============================] - 0s 67us/step - loss: 0.5750 - accuracy: 0.6480 - val_loss: 0.5558 - val_accuracy: 0.6667 Epoch 26/500 179/179 [==============================] - 0s 61us/step - loss: 0.5369 - accuracy: 0.6480 - val_loss: 0.5472 - val_accuracy: 0.6667 Epoch 27/500 179/179 [==============================] - 0s 50us/step - loss: 0.5503 - accuracy: 0.6480 - val_loss: 0.5403 - val_accuracy: 0.6667 Epoch 28/500 179/179 
[==============================] - 0s 50us/step - loss: 0.5446 - accuracy: 0.6480 - val_loss: 0.5366 - val_accuracy: 0.6667 Epoch 29/500 179/179 [==============================] - 0s 50us/step - loss: 0.5416 - accuracy: 0.6480 - val_loss: 0.5328 - val_accuracy: 0.6667 Epoch 30/500 179/179 [==============================] - 0s 39us/step - loss: 0.5550 - accuracy: 0.6704 - val_loss: 0.5299 - val_accuracy: 0.6667 Epoch 31/500 179/179 [==============================] - 0s 50us/step - loss: 0.5488 - accuracy: 0.7263 - val_loss: 0.5269 - val_accuracy: 0.6667 Epoch 32/500 179/179 [==============================] - 0s 45us/step - loss: 0.5292 - accuracy: 0.7039 - val_loss: 0.5223 - val_accuracy: 0.7111 Epoch 33/500 179/179 [==============================] - 0s 45us/step - loss: 0.5361 - accuracy: 0.7430 - val_loss: 0.5185 - val_accuracy: 0.7333 Epoch 34/500 179/179 [==============================] - 0s 50us/step - loss: 0.5326 - accuracy: 0.7430 - val_loss: 0.5153 - val_accuracy: 0.7111 Epoch 35/500 179/179 [==============================] - 0s 45us/step - loss: 0.5308 - accuracy: 0.7542 - val_loss: 0.5114 - val_accuracy: 0.7333 Epoch 36/500 179/179 [==============================] - 0s 45us/step - loss: 0.5152 - accuracy: 0.7430 - val_loss: 0.5084 - val_accuracy: 0.7556 Epoch 37/500 179/179 [==============================] - 0s 50us/step - loss: 0.5210 - accuracy: 0.7430 - val_loss: 0.5062 - val_accuracy: 0.8000 Epoch 38/500 179/179 [==============================] - 0s 50us/step - loss: 0.5125 - accuracy: 0.7654 - val_loss: 0.5052 - val_accuracy: 0.8000 Epoch 39/500 179/179 [==============================] - 0s 50us/step - loss: 0.4906 - accuracy: 0.7933 - val_loss: 0.5041 - val_accuracy: 0.8000 Epoch 40/500 179/179 [==============================] - 0s 45us/step - loss: 0.5023 - accuracy: 0.7709 - val_loss: 0.5030 - val_accuracy: 0.8000 Epoch 41/500 179/179 [==============================] - 0s 45us/step - loss: 0.5138 - accuracy: 0.8156 - val_loss: 0.5011 - 
val_accuracy: 0.8222 Epoch 42/500 179/179 [==============================] - 0s 39us/step - loss: 0.5015 - accuracy: 0.7765 - val_loss: 0.4977 - val_accuracy: 0.8222 Epoch 43/500 179/179 [==============================] - 0s 39us/step - loss: 0.4787 - accuracy: 0.8212 - val_loss: 0.4944 - val_accuracy: 0.8000 Epoch 44/500 179/179 [==============================] - 0s 45us/step - loss: 0.4935 - accuracy: 0.7542 - val_loss: 0.4916 - val_accuracy: 0.8000 Epoch 45/500 179/179 [==============================] - 0s 45us/step - loss: 0.4716 - accuracy: 0.8101 - val_loss: 0.4910 - val_accuracy: 0.8000 Epoch 46/500 179/179 [==============================] - 0s 45us/step - loss: 0.4768 - accuracy: 0.8156 - val_loss: 0.4900 - val_accuracy: 0.8000 Epoch 47/500 179/179 [==============================] - 0s 39us/step - loss: 0.4824 - accuracy: 0.7933 - val_loss: 0.4902 - val_accuracy: 0.8000 Epoch 48/500 179/179 [==============================] - 0s 45us/step - loss: 0.4764 - accuracy: 0.8045 - val_loss: 0.4890 - val_accuracy: 0.8000 Epoch 49/500 179/179 [==============================] - 0s 39us/step - loss: 0.4644 - accuracy: 0.8324 - val_loss: 0.4888 - val_accuracy: 0.8222 Epoch 50/500 179/179 [==============================] - 0s 45us/step - loss: 0.4933 - accuracy: 0.7709 - val_loss: 0.4875 - val_accuracy: 0.8222 Epoch 51/500 179/179 [==============================] - 0s 50us/step - loss: 0.4360 - accuracy: 0.8156 - val_loss: 0.4874 - val_accuracy: 0.8222 Epoch 52/500 179/179 [==============================] - 0s 45us/step - loss: 0.4715 - accuracy: 0.7933 - val_loss: 0.4882 - val_accuracy: 0.8222 Epoch 53/500 179/179 [==============================] - 0s 56us/step - loss: 0.4684 - accuracy: 0.7933 - val_loss: 0.4881 - val_accuracy: 0.8222 Epoch 54/500 179/179 [==============================] - 0s 50us/step - loss: 0.4588 - accuracy: 0.8212 - val_loss: 0.4881 - val_accuracy: 0.8222 Epoch 55/500 179/179 [==============================] - 0s 45us/step - loss: 0.4544 - 
accuracy: 0.7821 - val_loss: 0.4886 - val_accuracy: 0.8000 Epoch 56/500 179/179 [==============================] - 0s 50us/step - loss: 0.4846 - accuracy: 0.7877 - val_loss: 0.4875 - val_accuracy: 0.8000 Epoch 57/500 179/179 [==============================] - 0s 39us/step - loss: 0.4262 - accuracy: 0.8156 - val_loss: 0.4865 - val_accuracy: 0.8222 Epoch 58/500 179/179 [==============================] - 0s 45us/step - loss: 0.4471 - accuracy: 0.8268 - val_loss: 0.4849 - val_accuracy: 0.8222 Epoch 59/500 179/179 [==============================] - 0s 45us/step - loss: 0.4192 - accuracy: 0.7989 - val_loss: 0.4842 - val_accuracy: 0.8222 Epoch 60/500 179/179 [==============================] - 0s 39us/step - loss: 0.4718 - accuracy: 0.8212 - val_loss: 0.4833 - val_accuracy: 0.8222 Epoch 61/500 179/179 [==============================] - 0s 45us/step - loss: 0.4486 - accuracy: 0.8156 - val_loss: 0.4821 - val_accuracy: 0.8222 Epoch 62/500 179/179 [==============================] - 0s 39us/step - loss: 0.4463 - accuracy: 0.7933 - val_loss: 0.4796 - val_accuracy: 0.8222 Epoch 63/500 179/179 [==============================] - 0s 45us/step - loss: 0.5011 - accuracy: 0.7877 - val_loss: 0.4781 - val_accuracy: 0.8222 Epoch 64/500 179/179 [==============================] - ETA: 0s - loss: 0.4291 - accuracy: 0.81 - 0s 45us/step - loss: 0.4378 - accuracy: 0.8212 - val_loss: 0.4786 - val_accuracy: 0.8222 Epoch 65/500 179/179 [==============================] - 0s 50us/step - loss: 0.4881 - accuracy: 0.8212 - val_loss: 0.4767 - val_accuracy: 0.8222 Epoch 66/500 179/179 [==============================] - 0s 45us/step - loss: 0.4707 - accuracy: 0.8156 - val_loss: 0.4760 - val_accuracy: 0.8222 Epoch 67/500 179/179 [==============================] - 0s 45us/step - loss: 0.4376 - accuracy: 0.8045 - val_loss: 0.4756 - val_accuracy: 0.8222 Epoch 68/500 179/179 [==============================] - 0s 50us/step - loss: 0.4238 - accuracy: 0.8268 - val_loss: 0.4773 - val_accuracy: 0.8222 Epoch 69/500 
179/179 [==============================] - 0s 45us/step - loss: 0.4367 - accuracy: 0.8436 - val_loss: 0.4779 - val_accuracy: 0.8222 Epoch 70/500 179/179 [==============================] - 0s 50us/step - loss: 0.4601 - accuracy: 0.7989 - val_loss: 0.4784 - val_accuracy: 0.8222 Epoch 71/500 179/179 [==============================] - 0s 45us/step - loss: 0.4632 - accuracy: 0.7877 - val_loss: 0.4770 - val_accuracy: 0.8222 Epoch 72/500 179/179 [==============================] - 0s 50us/step - loss: 0.4282 - accuracy: 0.8212 - val_loss: 0.4773 - val_accuracy: 0.8222 Epoch 73/500 179/179 [==============================] - 0s 45us/step - loss: 0.4329 - accuracy: 0.8436 - val_loss: 0.4768 - val_accuracy: 0.8222 Epoch 74/500 179/179 [==============================] - 0s 39us/step - loss: 0.4578 - accuracy: 0.8101 - val_loss: 0.4759 - val_accuracy: 0.8222 Epoch 75/500 179/179 [==============================] - 0s 45us/step - loss: 0.4701 - accuracy: 0.7989 - val_loss: 0.4756 - val_accuracy: 0.8222 Epoch 76/500 179/179 [==============================] - 0s 45us/step - loss: 0.4314 - accuracy: 0.8324 - val_loss: 0.4754 - val_accuracy: 0.8222 Epoch 77/500 179/179 [==============================] - 0s 50us/step - loss: 0.4279 - accuracy: 0.8101 - val_loss: 0.4749 - val_accuracy: 0.8222 Epoch 78/500 179/179 [==============================] - 0s 50us/step - loss: 0.4386 - accuracy: 0.8156 - val_loss: 0.4739 - val_accuracy: 0.8222 Epoch 79/500 179/179 [==============================] - 0s 39us/step - loss: 0.4334 - accuracy: 0.7877 - val_loss: 0.4727 - val_accuracy: 0.8222 Epoch 80/500 179/179 [==============================] - 0s 45us/step - loss: 0.4583 - accuracy: 0.8156 - val_loss: 0.4732 - val_accuracy: 0.8222 Epoch 81/500 179/179 [==============================] - 0s 39us/step - loss: 0.4145 - accuracy: 0.7933 - val_loss: 0.4741 - val_accuracy: 0.8222 Epoch 82/500 179/179 [==============================] - 0s 45us/step - loss: 0.4100 - accuracy: 0.8268 - val_loss: 0.4748 - 
val_accuracy: 0.8222 Epoch 83/500 179/179 [==============================] - 0s 39us/step - loss: 0.4296 - accuracy: 0.8268 - val_loss: 0.4765 - val_accuracy: 0.8222 Epoch 84/500 179/179 [==============================] - 0s 39us/step - loss: 0.4375 - accuracy: 0.8324 - val_loss: 0.4779 - val_accuracy: 0.8222 Epoch 85/500 179/179 [==============================] - 0s 39us/step - loss: 0.4347 - accuracy: 0.8603 - val_loss: 0.4795 - val_accuracy: 0.8222 Epoch 86/500 179/179 [==============================] - 0s 45us/step - loss: 0.4434 - accuracy: 0.8492 - val_loss: 0.4779 - val_accuracy: 0.8222 Epoch 87/500 179/179 [==============================] - 0s 45us/step - loss: 0.4662 - accuracy: 0.7709 - val_loss: 0.4751 - val_accuracy: 0.8222 Epoch 88/500 179/179 [==============================] - 0s 45us/step - loss: 0.3888 - accuracy: 0.8212 - val_loss: 0.4731 - val_accuracy: 0.8222 Epoch 89/500 179/179 [==============================] - 0s 45us/step - loss: 0.4636 - accuracy: 0.7542 - val_loss: 0.4734 - val_accuracy: 0.8222 Epoch 90/500 179/179 [==============================] - 0s 45us/step - loss: 0.4496 - accuracy: 0.8380 - val_loss: 0.4742 - val_accuracy: 0.8000 Epoch 91/500 179/179 [==============================] - 0s 45us/step - loss: 0.4118 - accuracy: 0.8659 - val_loss: 0.4730 - val_accuracy: 0.8222 Epoch 92/500 179/179 [==============================] - 0s 45us/step - loss: 0.4347 - accuracy: 0.8156 - val_loss: 0.4713 - val_accuracy: 0.8222 Epoch 93/500 179/179 [==============================] - 0s 43us/step - loss: 0.4385 - accuracy: 0.8492 - val_loss: 0.4712 - val_accuracy: 0.8222 Epoch 94/500 179/179 [==============================] - 0s 45us/step - loss: 0.3707 - accuracy: 0.8380 - val_loss: 0.4717 - val_accuracy: 0.8222 Epoch 95/500 179/179 [==============================] - 0s 45us/step - loss: 0.4644 - accuracy: 0.7989 - val_loss: 0.4733 - val_accuracy: 0.8222 Epoch 96/500 179/179 [==============================] - 0s 45us/step - loss: 0.4458 - 
accuracy: 0.8045 - val_loss: 0.4734 - val_accuracy: 0.8222 Epoch 97/500 179/179 [==============================] - 0s 45us/step - loss: 0.4017 - accuracy: 0.8101 - val_loss: 0.4748 - val_accuracy: 0.8222 Epoch 98/500 179/179 [==============================] - 0s 45us/step - loss: 0.4157 - accuracy: 0.8436 - val_loss: 0.4768 - val_accuracy: 0.8222 Epoch 99/500 179/179 [==============================] - 0s 45us/step - loss: 0.4628 - accuracy: 0.8101 - val_loss: 0.4758 - val_accuracy: 0.8222 Epoch 100/500 179/179 [==============================] - 0s 44us/step - loss: 0.4317 - accuracy: 0.8268 - val_loss: 0.4754 - val_accuracy: 0.8222 Epoch 101/500 179/179 [==============================] - 0s 50us/step - loss: 0.4243 - accuracy: 0.8212 - val_loss: 0.4746 - val_accuracy: 0.8222 Epoch 102/500 179/179 [==============================] - 0s 45us/step - loss: 0.3967 - accuracy: 0.8268 - val_loss: 0.4747 - val_accuracy: 0.8222 Epoch 103/500 179/179 [==============================] - 0s 45us/step - loss: 0.4199 - accuracy: 0.8101 - val_loss: 0.4763 - val_accuracy: 0.8222 Epoch 104/500 179/179 [==============================] - 0s 45us/step - loss: 0.4141 - accuracy: 0.8380 - val_loss: 0.4770 - val_accuracy: 0.8222 Epoch 105/500 179/179 [==============================] - 0s 50us/step - loss: 0.4497 - accuracy: 0.7765 - val_loss: 0.4780 - val_accuracy: 0.8222 Epoch 106/500 179/179 [==============================] - 0s 45us/step - loss: 0.3986 - accuracy: 0.8212 - val_loss: 0.4776 - val_accuracy: 0.8222 Epoch 107/500 179/179 [==============================] - 0s 45us/step - loss: 0.4229 - accuracy: 0.8156 - val_loss: 0.4771 - val_accuracy: 0.8222 Epoch 108/500 179/179 [==============================] - 0s 45us/step - loss: 0.3862 - accuracy: 0.8101 - val_loss: 0.4784 - val_accuracy: 0.8222 Epoch 109/500 179/179 [==============================] - 0s 45us/step - loss: 0.4358 - accuracy: 0.7989 - val_loss: 0.4801 - val_accuracy: 0.8222 Epoch 110/500 179/179 
[==============================] - 0s 39us/step - loss: 0.3982 - accuracy: 0.8101 - val_loss: 0.4817 - val_accuracy: 0.8222 Epoch 111/500 179/179 [==============================] - 0s 45us/step - loss: 0.4159 - accuracy: 0.8715 - val_loss: 0.4809 - val_accuracy: 0.8222 Epoch 112/500 179/179 [==============================] - 0s 45us/step - loss: 0.4330 - accuracy: 0.8268 - val_loss: 0.4802 - val_accuracy: 0.8222
# Mean validation accuracy averaged over ALL epochs — not the final or best
# epoch's score. NOTE(review): the printed "val_accuracy" therefore
# under-states the trained model (late epochs sit around 0.82 in the log);
# consider reporting history.history['val_accuracy'][-1] or max() instead.
val_accuracy = np.mean(history.history['val_accuracy'])
print("\n%s: %.2f%%" % ('val_accuracy', val_accuracy*100))
val_accuracy: 77.24%
# Learning curves: training vs. validation loss per epoch.
history_df = pd.DataFrame(history.history)
plt.plot(history_df["loss"], "#6daa9f", label='Training loss')
plt.plot(history_df["val_loss"], "#774571", label='Validation loss')
plt.title('Training and Validation loss')
plt.xlabel('Epochs')
plt.ylabel('Loss')
plt.legend(loc="best")
plt.show()
ERROR! Session/line number was not unique in database. History logging moved to new session 59
# Learning curves: training vs. validation accuracy per epoch.
history_df = pd.DataFrame(history.history)
plt.plot(history_df["accuracy"], "#6daa9f", label='Training accuracy')
plt.plot(history_df["val_accuracy"], "#774571", label='Validation accuracy')
plt.title('Training and Validation accuracy')
plt.xlabel('Epochs')
plt.ylabel('Accuracy')
plt.legend()
plt.show()
# Predict probabilities on the held-out test set, then threshold at 0.5 to
# obtain hard boolean class labels.
# (Removed the argument-less `np.set_printoptions()` call — with no
# arguments it changes nothing and was a no-op.)
y_pred = model.predict(X_test)
y_pred = (y_pred > 0.5)
# Confusion matrix, normalised so each cell is a proportion of all test rows.
cf_matrix = confusion_matrix(y_test, y_pred)
cmap1 = sns.diverging_palette(275, 150, s=40, l=65, n=6)
fig, ax = plt.subplots(figsize=(12, 8))
sns.heatmap(cf_matrix / cf_matrix.sum(), cmap=cmap1, annot=True,
            annot_kws={'size': 15}, ax=ax)
<AxesSubplot:>
# Per-class precision / recall / F1 on the test set.
print(classification_report(y_test, y_pred))
precision recall f1-score support
0 0.86 0.88 0.87 57
1 0.59 0.56 0.57 18
accuracy 0.80 75
macro avg 0.73 0.72 0.72 75
weighted avg 0.80 0.80 0.80 75