from google.colab import drive
drive.mount('/content/drive')
%cd /content/drive/MyDrive/Datasets/
Drive already mounted at /content/drive; to attempt to forcibly remount, call drive.mount("/content/drive", force_remount=True). /content/drive/MyDrive/Datasets
import numpy as np
import pandas as pd
import tensorflow as tf
from tensorflow import keras
from keras.models import Sequential
from keras.layers import Dense
import matplotlib.pyplot as plt
import seaborn as sns
dataset = pd.read_csv('Diabetes.tsv', sep='\t')
dataset.head()
Preg | GLU | BP | ST | INS | BMI | DPF | Age | Outcome | |
---|---|---|---|---|---|---|---|---|---|
0 | 6 | 148 | 72 | 35 | 0 | 33.6 | 0.627 | 50 | 1 |
1 | 1 | 85 | 66 | 29 | 0 | 26.6 | 0.351 | 31 | 0 |
2 | 8 | 183 | 64 | 0 | 0 | 23.3 | 0.672 | 32 | 1 |
3 | 1 | 89 | 66 | 23 | 94 | 28.1 | 0.167 | 21 | 0 |
4 | 0 | 137 | 40 | 35 | 168 | 43.1 | 2.288 | 33 | 1 |
dataset.info()
<class 'pandas.core.frame.DataFrame'> RangeIndex: 768 entries, 0 to 767 Data columns (total 9 columns): # Column Non-Null Count Dtype --- ------ -------------- ----- 0 Preg 768 non-null int64 1 GLU 768 non-null int64 2 BP 768 non-null int64 3 ST 768 non-null int64 4 INS 768 non-null int64 5 BMI 768 non-null float64 6 DPF 768 non-null float64 7 Age 768 non-null int64 8 Outcome 768 non-null int64 dtypes: float64(2), int64(7) memory usage: 54.1 KB
dataset.isnull().sum()
Preg 0 GLU 0 BP 0 ST 0 INS 0 BMI 0 DPF 0 Age 0 Outcome 0 dtype: int64
f, ax = plt.subplots(1, 2, figsize = (12, 6))
f.suptitle("Diabetes?", fontsize = 18.)
_ = dataset.Outcome.value_counts().plot.bar(ax = ax[0], rot = 0,
color = (sns.color_palette()[0], sns.color_palette()[2])).set(xticklabels = ["No", "Yes"])
_ = dataset.Outcome.value_counts().plot.pie(labels = ("No", "Yes"), autopct = "%.2f%%",
label = "", fontsize = 13., ax = ax[1],\
colors = (sns.color_palette()[0], sns.color_palette()[2]), wedgeprops = {"linewidth": 1.5, "edgecolor": "#F7F7F7"}),
ax[1].texts[1].set_color("#F7F7F7"), ax[1].texts[3].set_color("#F7F7F7")
(None, None)
fig, ax = plt.subplots(4,2, figsize=(16,16))
sns.distplot(dataset.Age, bins = 20, ax=ax[0,0])
sns.distplot(dataset.Preg, bins = 20, ax=ax[0,1])
sns.distplot(dataset.GLU, bins = 20, ax=ax[1,0])
sns.distplot(dataset.BP, bins = 20, ax=ax[1,1])
sns.distplot(dataset.ST, bins = 20, ax=ax[2,0])
sns.distplot(dataset.INS, bins = 20, ax=ax[2,1])
sns.distplot(dataset.BMI, bins = 20, ax=ax[3,0])
sns.distplot(dataset.DPF, bins = 20, ax=ax[3,1])
/usr/local/lib/python3.7/dist-packages/seaborn/distributions.py:2619: FutureWarning: `distplot` is a deprecated function and will be removed in a future version. Please adapt your code to use either `displot` (a figure-level function with similar flexibility) or `histplot` (an axes-level function for histograms). warnings.warn(msg, FutureWarning) /usr/local/lib/python3.7/dist-packages/seaborn/distributions.py:2619: FutureWarning: `distplot` is a deprecated function and will be removed in a future version. Please adapt your code to use either `displot` (a figure-level function with similar flexibility) or `histplot` (an axes-level function for histograms). warnings.warn(msg, FutureWarning) /usr/local/lib/python3.7/dist-packages/seaborn/distributions.py:2619: FutureWarning: `distplot` is a deprecated function and will be removed in a future version. Please adapt your code to use either `displot` (a figure-level function with similar flexibility) or `histplot` (an axes-level function for histograms). warnings.warn(msg, FutureWarning) /usr/local/lib/python3.7/dist-packages/seaborn/distributions.py:2619: FutureWarning: `distplot` is a deprecated function and will be removed in a future version. Please adapt your code to use either `displot` (a figure-level function with similar flexibility) or `histplot` (an axes-level function for histograms). warnings.warn(msg, FutureWarning) /usr/local/lib/python3.7/dist-packages/seaborn/distributions.py:2619: FutureWarning: `distplot` is a deprecated function and will be removed in a future version. Please adapt your code to use either `displot` (a figure-level function with similar flexibility) or `histplot` (an axes-level function for histograms). warnings.warn(msg, FutureWarning) /usr/local/lib/python3.7/dist-packages/seaborn/distributions.py:2619: FutureWarning: `distplot` is a deprecated function and will be removed in a future version. Please adapt your code to use either `displot` (a figure-level function with similar flexibility) or `histplot` (an axes-level function for histograms). warnings.warn(msg, FutureWarning) /usr/local/lib/python3.7/dist-packages/seaborn/distributions.py:2619: FutureWarning: `distplot` is a deprecated function and will be removed in a future version. Please adapt your code to use either `displot` (a figure-level function with similar flexibility) or `histplot` (an axes-level function for histograms). warnings.warn(msg, FutureWarning) /usr/local/lib/python3.7/dist-packages/seaborn/distributions.py:2619: FutureWarning: `distplot` is a deprecated function and will be removed in a future version. Please adapt your code to use either `displot` (a figure-level function with similar flexibility) or `histplot` (an axes-level function for histograms). warnings.warn(msg, FutureWarning)
<matplotlib.axes._subplots.AxesSubplot at 0x7fa5ade4c650>
corr=dataset.corr()
sns.set(font_scale=1.15)
plt.figure(figsize=(14, 10))
sns.heatmap(corr, vmax=.8, linewidths=0.01,
square=True,annot=True,cmap='YlGnBu',linecolor="black")
plt.title('Correlation between features');
sns.pairplot(data=dataset, hue='Outcome')
plt.show()
x = dataset.iloc[:,0:8].values
y = dataset.iloc[:, -1].values
x
array([[ 6. , 148. , 72. , ..., 33.6 , 0.627, 50. ], [ 1. , 85. , 66. , ..., 26.6 , 0.351, 31. ], [ 8. , 183. , 64. , ..., 23.3 , 0.672, 32. ], ..., [ 5. , 121. , 72. , ..., 26.2 , 0.245, 30. ], [ 1. , 126. , 60. , ..., 30.1 , 0.349, 47. ], [ 1. , 93. , 70. , ..., 30.4 , 0.315, 23. ]])
y
array([1, 0, 1, 0, 1, 0, 1, 0, 1, 1, 0, 1, 0, 1, 1, 1, 1, 1, 0, 1, 0, 0, 1, 1, 1, 1, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 1, 1, 1, 0, 0, 0, 1, 0, 1, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 1, 0, 1, 0, 0, 0, 1, 0, 1, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 0, 0, 1, 1, 1, 0, 0, 0, 1, 0, 0, 0, 1, 1, 0, 0, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 1, 1, 0, 0, 0, 1, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 1, 1, 0, 0, 0, 1, 0, 1, 0, 1, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 0, 0, 1, 1, 0, 1, 0, 1, 1, 1, 0, 0, 0, 0, 0, 0, 1, 1, 0, 1, 0, 0, 0, 1, 1, 1, 1, 0, 1, 1, 1, 1, 0, 0, 0, 0, 0, 1, 0, 0, 1, 1, 0, 0, 0, 1, 1, 1, 1, 0, 0, 0, 1, 1, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 1, 0, 1, 0, 0, 1, 0, 1, 0, 0, 1, 1, 0, 0, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 1, 1, 0, 0, 1, 0, 0, 0, 1, 1, 1, 0, 0, 1, 0, 1, 0, 1, 1, 0, 1, 0, 0, 1, 0, 1, 1, 0, 0, 1, 0, 1, 0, 0, 1, 0, 1, 0, 1, 1, 1, 0, 0, 1, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 1, 1, 1, 0, 1, 1, 0, 0, 1, 0, 0, 1, 0, 0, 1, 1, 0, 0, 0, 0, 1, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 0, 0, 1, 0, 0, 1, 0, 0, 1, 0, 1, 1, 0, 1, 0, 1, 0, 1, 0, 1, 1, 0, 0, 0, 0, 1, 1, 0, 1, 0, 1, 0, 0, 0, 0, 1, 1, 0, 1, 0, 1, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 1, 1, 1, 0, 0, 1, 0, 0, 1, 0, 0, 0, 1, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 1, 1, 1, 1, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 1, 0, 1, 1, 0, 0, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 0, 1, 0, 0, 1, 0, 0, 0, 0, 1, 1, 0, 1, 0, 0, 0, 0, 1, 1, 0, 1, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1, 1, 1, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 1, 0, 1, 1, 1, 1, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 1, 0, 0, 1, 0, 1, 0, 0, 0, 0, 0, 1, 0, 1, 0, 1, 0, 1, 1, 0, 0, 0, 0, 1, 1, 0, 0, 0, 1, 0, 1, 1, 0, 0, 1, 0, 0, 1, 1, 0, 0, 1, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 1, 0, 0, 1, 0, 1, 1, 1, 0, 0, 1, 1, 1, 0, 1, 0, 1, 0, 1, 0, 0, 0, 0, 1, 0])
# from sklearn.preprocessing import MinMaxScaler
# sc = MinMaxScaler()
# x = sc.fit_transform(x)
from sklearn.preprocessing import StandardScaler
sc = StandardScaler()
x = sc.fit_transform(x)
x
array([[ 0.63994726, 0.84832379, 0.14964075, ..., 0.20401277, 0.46849198, 1.4259954 ], [-0.84488505, -1.12339636, -0.16054575, ..., -0.68442195, -0.36506078, -0.19067191], [ 1.23388019, 1.94372388, -0.26394125, ..., -1.10325546, 0.60439732, -0.10558415], ..., [ 0.3429808 , 0.00330087, 0.14964075, ..., -0.73518964, -0.68519336, -0.27575966], [-0.84488505, 0.1597866 , -0.47073225, ..., -0.24020459, -0.37110101, 1.17073215], [-0.84488505, -0.8730192 , 0.04624525, ..., -0.20212881, -0.47378505, -0.87137393]])
x.shape
(768, 8)
from sklearn.model_selection import train_test_split
x_train, x_test, y_train, y_test = train_test_split(x, y, test_size = 0.20, random_state=12)
x_test.shape
(154, 8)
y_test.shape
(154,)
model = Sequential()
model.add(Dense(32, input_dim=8, activation='softmax'))
model.add(Dense(16, activation='relu'))
model.add(Dense(8, activation='relu'))
model.add(Dense(1, activation='sigmoid'))
model.compile(loss='MSE', optimizer='adam', metrics=['accuracy'])
model.summary()
Model: "sequential_1" _________________________________________________________________ Layer (type) Output Shape Param # ================================================================= dense_4 (Dense) (None, 32) 288 dense_5 (Dense) (None, 16) 528 dense_6 (Dense) (None, 8) 136 dense_7 (Dense) (None, 1) 9 ================================================================= Total params: 961 Trainable params: 961 Non-trainable params: 0 _________________________________________________________________
from keras.utils.vis_utils import plot_model
from IPython.display import Image
plot_model(model, to_file='nn_model_plot.png', show_shapes=True, show_layer_names=True)
Image(retina=True, filename='nn_model_plot.png')
hist = model.fit(x_train,y_train, epochs =100,validation_data=(x_test, y_test))
Epoch 1/100 20/20 [==============================] - 2s 21ms/step - loss: 0.2454 - accuracy: 0.6498 - val_loss: 0.2398 - val_accuracy: 0.6429 Epoch 2/100 20/20 [==============================] - 0s 9ms/step - loss: 0.2335 - accuracy: 0.6531 - val_loss: 0.2296 - val_accuracy: 0.6429 Epoch 3/100 20/20 [==============================] - 0s 7ms/step - loss: 0.2245 - accuracy: 0.6531 - val_loss: 0.2234 - val_accuracy: 0.6429 Epoch 4/100 20/20 [==============================] - 0s 9ms/step - loss: 0.2186 - accuracy: 0.6531 - val_loss: 0.2194 - val_accuracy: 0.6429 Epoch 5/100 20/20 [==============================] - 0s 7ms/step - loss: 0.2143 - accuracy: 0.6531 - val_loss: 0.2149 - val_accuracy: 0.6429 Epoch 6/100 20/20 [==============================] - 0s 9ms/step - loss: 0.2085 - accuracy: 0.6531 - val_loss: 0.2092 - val_accuracy: 0.6429 Epoch 7/100 20/20 [==============================] - 0s 10ms/step - loss: 0.2028 - accuracy: 0.6531 - val_loss: 0.2030 - val_accuracy: 0.6429 Epoch 8/100 20/20 [==============================] - 0s 12ms/step - loss: 0.1970 - accuracy: 0.6531 - val_loss: 0.1969 - val_accuracy: 0.6429 Epoch 9/100 20/20 [==============================] - 0s 6ms/step - loss: 0.1912 - accuracy: 0.6629 - val_loss: 0.1905 - val_accuracy: 0.6688 Epoch 10/100 20/20 [==============================] - 0s 6ms/step - loss: 0.1846 - accuracy: 0.7052 - val_loss: 0.1836 - val_accuracy: 0.7403 Epoch 11/100 20/20 [==============================] - 0s 6ms/step - loss: 0.1791 - accuracy: 0.7508 - val_loss: 0.1788 - val_accuracy: 0.7792 Epoch 12/100 20/20 [==============================] - 0s 8ms/step - loss: 0.1745 - accuracy: 0.7524 - val_loss: 0.1742 - val_accuracy: 0.7857 Epoch 13/100 20/20 [==============================] - 0s 13ms/step - loss: 0.1706 - accuracy: 0.7622 - val_loss: 0.1708 - val_accuracy: 0.7987 Epoch 14/100 20/20 [==============================] - 0s 8ms/step - loss: 0.1674 - accuracy: 0.7671 - val_loss: 0.1680 - val_accuracy: 0.8052 Epoch 15/100 20/20 [==============================] - 0s 10ms/step - loss: 0.1649 - accuracy: 0.7671 - val_loss: 0.1657 - val_accuracy: 0.8182 Epoch 16/100 20/20 [==============================] - 0s 17ms/step - loss: 0.1626 - accuracy: 0.7752 - val_loss: 0.1631 - val_accuracy: 0.8182 Epoch 17/100 20/20 [==============================] - 0s 18ms/step - loss: 0.1604 - accuracy: 0.7736 - val_loss: 0.1613 - val_accuracy: 0.8117 Epoch 18/100 20/20 [==============================] - 0s 19ms/step - loss: 0.1588 - accuracy: 0.7785 - val_loss: 0.1594 - val_accuracy: 0.7987 Epoch 19/100 20/20 [==============================] - 0s 15ms/step - loss: 0.1572 - accuracy: 0.7769 - val_loss: 0.1578 - val_accuracy: 0.8117 Epoch 20/100 20/20 [==============================] - 0s 13ms/step - loss: 0.1555 - accuracy: 0.7801 - val_loss: 0.1569 - val_accuracy: 0.7922 Epoch 21/100 20/20 [==============================] - 0s 16ms/step - loss: 0.1542 - accuracy: 0.7834 - val_loss: 0.1555 - val_accuracy: 0.7922 Epoch 22/100 20/20 [==============================] - 0s 16ms/step - loss: 0.1534 - accuracy: 0.7769 - val_loss: 0.1545 - val_accuracy: 0.7922 Epoch 23/100 20/20 [==============================] - 0s 13ms/step - loss: 0.1528 - accuracy: 0.7915 - val_loss: 0.1547 - val_accuracy: 0.7857 Epoch 24/100 20/20 [==============================] - 0s 15ms/step - loss: 0.1524 - accuracy: 0.7850 - val_loss: 0.1530 - val_accuracy: 0.7922 Epoch 25/100 20/20 [==============================] - 0s 9ms/step - loss: 0.1512 - accuracy: 0.7866 - val_loss: 0.1529 - val_accuracy: 0.7987 Epoch 26/100 20/20 [==============================] - 0s 11ms/step - loss: 0.1506 - accuracy: 0.7850 - val_loss: 0.1515 - val_accuracy: 0.7922 Epoch 27/100 20/20 [==============================] - 0s 7ms/step - loss: 0.1501 - accuracy: 0.7834 - val_loss: 0.1524 - val_accuracy: 0.7987 Epoch 28/100 20/20 [==============================] - 0s 10ms/step - loss: 0.1501 - accuracy: 0.7850 - val_loss: 0.1515 - val_accuracy: 0.7987 Epoch 29/100 20/20 [==============================] - 0s 11ms/step - loss: 0.1489 - accuracy: 0.7834 - val_loss: 0.1512 - val_accuracy: 0.7922 Epoch 30/100 20/20 [==============================] - 0s 10ms/step - loss: 0.1491 - accuracy: 0.7818 - val_loss: 0.1507 - val_accuracy: 0.7922 Epoch 31/100 20/20 [==============================] - 0s 10ms/step - loss: 0.1484 - accuracy: 0.7850 - val_loss: 0.1509 - val_accuracy: 0.7987 Epoch 32/100 20/20 [==============================] - 0s 14ms/step - loss: 0.1483 - accuracy: 0.7883 - val_loss: 0.1506 - val_accuracy: 0.7987 Epoch 33/100 20/20 [==============================] - 0s 12ms/step - loss: 0.1480 - accuracy: 0.7883 - val_loss: 0.1503 - val_accuracy: 0.7987 Epoch 34/100 20/20 [==============================] - 0s 15ms/step - loss: 0.1477 - accuracy: 0.7850 - val_loss: 0.1497 - val_accuracy: 0.7987 Epoch 35/100 20/20 [==============================] - 0s 7ms/step - loss: 0.1476 - accuracy: 0.7834 - val_loss: 0.1487 - val_accuracy: 0.8052 Epoch 36/100 20/20 [==============================] - 0s 6ms/step - loss: 0.1476 - accuracy: 0.7785 - val_loss: 0.1491 - val_accuracy: 0.7987 Epoch 37/100 20/20 [==============================] - 0s 11ms/step - loss: 0.1475 - accuracy: 0.7818 - val_loss: 0.1487 - val_accuracy: 0.7987 Epoch 38/100 20/20 [==============================] - 0s 14ms/step - loss: 0.1469 - accuracy: 0.7850 - val_loss: 0.1479 - val_accuracy: 0.8052 Epoch 39/100 20/20 [==============================] - 0s 15ms/step - loss: 0.1467 - accuracy: 0.7850 - val_loss: 0.1482 - val_accuracy: 0.7987 Epoch 40/100 20/20 [==============================] - 0s 12ms/step - loss: 0.1467 - accuracy: 0.7866 - val_loss: 0.1482 - val_accuracy: 0.7987 Epoch 41/100 20/20 [==============================] - 0s 6ms/step - loss: 0.1466 - accuracy: 0.7883 - val_loss: 0.1479 - val_accuracy: 0.8052 Epoch 42/100 20/20 [==============================] - 0s 10ms/step - loss: 0.1461 - accuracy: 0.7883 - val_loss: 0.1484 - val_accuracy: 0.8052 Epoch 43/100 20/20 [==============================] - 0s 12ms/step - loss: 0.1462 - accuracy: 0.7899 - val_loss: 0.1487 - val_accuracy: 0.7987 Epoch 44/100 20/20 [==============================] - 0s 7ms/step - loss: 0.1456 - accuracy: 0.7883 - val_loss: 0.1476 - val_accuracy: 0.8117 Epoch 45/100 20/20 [==============================] - 0s 9ms/step - loss: 0.1460 - accuracy: 0.7932 - val_loss: 0.1477 - val_accuracy: 0.8117 Epoch 46/100 20/20 [==============================] - 0s 7ms/step - loss: 0.1451 - accuracy: 0.7915 - val_loss: 0.1485 - val_accuracy: 0.7987 Epoch 47/100 20/20 [==============================] - 0s 9ms/step - loss: 0.1453 - accuracy: 0.7801 - val_loss: 0.1491 - val_accuracy: 0.8052 Epoch 48/100 20/20 [==============================] - 0s 6ms/step - loss: 0.1455 - accuracy: 0.7915 - val_loss: 0.1480 - val_accuracy: 0.8052 Epoch 49/100 20/20 [==============================] - 0s 6ms/step - loss: 0.1454 - accuracy: 0.7883 - val_loss: 0.1489 - val_accuracy: 0.7987 Epoch 50/100 20/20 [==============================] - 0s 7ms/step - loss: 0.1451 - accuracy: 0.7915 - val_loss: 0.1480 - val_accuracy: 0.8052 Epoch 51/100 20/20 [==============================] - 0s 10ms/step - loss: 0.1446 - accuracy: 0.7915 - val_loss: 0.1477 - val_accuracy: 0.8052 Epoch 52/100 20/20 [==============================] - 0s 9ms/step - loss: 0.1442 - accuracy: 0.7964 - val_loss: 0.1474 - val_accuracy: 0.8052 Epoch 53/100 20/20 [==============================] - 0s 6ms/step - loss: 0.1444 - accuracy: 0.7948 - val_loss: 0.1479 - val_accuracy: 0.8052 Epoch 54/100 20/20 [==============================] - 0s 10ms/step - loss: 0.1439 - accuracy: 0.7964 - val_loss: 0.1476 - val_accuracy: 0.8052 Epoch 55/100 20/20 [==============================] - 0s 7ms/step - loss: 0.1438 - accuracy: 0.7964 - val_loss: 0.1476 - val_accuracy: 0.8117 Epoch 56/100 20/20 [==============================] - 0s 10ms/step - loss: 0.1436 - accuracy: 0.7948 - val_loss: 0.1474 - val_accuracy: 0.8052 Epoch 57/100 20/20 [==============================] - 0s 8ms/step - loss: 0.1433 - accuracy: 0.7980 - val_loss: 0.1472 - val_accuracy: 0.8117 Epoch 58/100 20/20 [==============================] - 0s 10ms/step - loss: 0.1435 - accuracy: 0.7948 - val_loss: 0.1468 - val_accuracy: 0.8117 Epoch 59/100 20/20 [==============================] - 0s 8ms/step - loss: 0.1437 - accuracy: 0.7915 - val_loss: 0.1476 - val_accuracy: 0.8117 Epoch 60/100 20/20 [==============================] - 0s 9ms/step - loss: 0.1428 - accuracy: 0.8013 - val_loss: 0.1471 - val_accuracy: 0.8052 Epoch 61/100 20/20 [==============================] - 0s 6ms/step - loss: 0.1424 - accuracy: 0.7964 - val_loss: 0.1472 - val_accuracy: 0.8052 Epoch 62/100 20/20 [==============================] - 0s 4ms/step - loss: 0.1429 - accuracy: 0.7980 - val_loss: 0.1470 - val_accuracy: 0.8117 Epoch 63/100 20/20 [==============================] - 0s 4ms/step - loss: 0.1427 - accuracy: 0.7980 - val_loss: 0.1464 - val_accuracy: 0.8117 Epoch 64/100 20/20 [==============================] - 0s 3ms/step - loss: 0.1432 - accuracy: 0.7964 - val_loss: 0.1463 - val_accuracy: 0.8117 Epoch 65/100 20/20 [==============================] - 0s 4ms/step - loss: 0.1419 - accuracy: 0.8013 - val_loss: 0.1464 - val_accuracy: 0.8117 Epoch 66/100 20/20 [==============================] - 0s 4ms/step - loss: 0.1418 - accuracy: 0.8013 - val_loss: 0.1458 - val_accuracy: 0.8117 Epoch 67/100 20/20 [==============================] - 0s 4ms/step - loss: 0.1414 - accuracy: 0.8013 - val_loss: 0.1461 - val_accuracy: 0.8117 Epoch 68/100 20/20 [==============================] - 0s 4ms/step - loss: 0.1415 - accuracy: 0.7997 - val_loss: 0.1461 - val_accuracy: 0.8117 Epoch 69/100 20/20 [==============================] - 0s 4ms/step - loss: 0.1415 - accuracy: 0.8013 - val_loss: 0.1460 - val_accuracy: 0.8117 Epoch 70/100 20/20 [==============================] - 0s 3ms/step - loss: 0.1412 - accuracy: 0.8013 - val_loss: 0.1464 - val_accuracy: 0.8117 Epoch 71/100 20/20 [==============================] - 0s 3ms/step - loss: 0.1408 - accuracy: 0.8029 - val_loss: 0.1465 - val_accuracy: 0.8117 Epoch 72/100 20/20 [==============================] - 0s 3ms/step - loss: 0.1412 - accuracy: 0.8013 - val_loss: 0.1461 - val_accuracy: 0.8117 Epoch 73/100 20/20 [==============================] - 0s 3ms/step - loss: 0.1404 - accuracy: 0.8062 - val_loss: 0.1467 - val_accuracy: 0.8117 Epoch 74/100 20/20 [==============================] - 0s 4ms/step - loss: 0.1406 - accuracy: 0.8046 - val_loss: 0.1459 - val_accuracy: 0.8117 Epoch 75/100 20/20 [==============================] - 0s 4ms/step - loss: 0.1404 - accuracy: 0.7980 - val_loss: 0.1461 - val_accuracy: 0.8117 Epoch 76/100 20/20 [==============================] - 0s 3ms/step - loss: 0.1403 - accuracy: 0.8013 - val_loss: 0.1457 - val_accuracy: 0.8052 Epoch 77/100 20/20 [==============================] - 0s 3ms/step - loss: 0.1400 - accuracy: 0.8013 - val_loss: 0.1453 - val_accuracy: 0.8182 Epoch 78/100 20/20 [==============================] - 0s 4ms/step - loss: 0.1400 - accuracy: 0.8029 - val_loss: 0.1454 - val_accuracy: 0.8247 Epoch 79/100 20/20 [==============================] - 0s 4ms/step - loss: 0.1395 - accuracy: 0.7997 - val_loss: 0.1449 - val_accuracy: 0.8117 Epoch 80/100 20/20 [==============================] - 0s 4ms/step - loss: 0.1398 - accuracy: 0.8046 - val_loss: 0.1452 - val_accuracy: 0.8117 Epoch 81/100 20/20 [==============================] - 0s 4ms/step - loss: 0.1390 - accuracy: 0.8029 - val_loss: 0.1451 - val_accuracy: 0.8312 Epoch 82/100 20/20 [==============================] - 0s 3ms/step - loss: 0.1392 - accuracy: 0.7980 - val_loss: 0.1452 - val_accuracy: 0.8312 Epoch 83/100 20/20 [==============================] - 0s 3ms/step - loss: 0.1391 - accuracy: 0.8029 - val_loss: 0.1454 - val_accuracy: 0.8312 Epoch 84/100 20/20 [==============================] - 0s 4ms/step - loss: 0.1390 - accuracy: 0.8013 - val_loss: 0.1452 - val_accuracy: 0.8312 Epoch 85/100 20/20 [==============================] - 0s 4ms/step - loss: 0.1386 - accuracy: 0.8013 - val_loss: 0.1452 - val_accuracy: 0.8247 Epoch 86/100 20/20 [==============================] - 0s 4ms/step - loss: 0.1401 - accuracy: 0.8029 - val_loss: 0.1452 - val_accuracy: 0.8117 Epoch 87/100 20/20 [==============================] - 0s 4ms/step - loss: 0.1386 - accuracy: 0.7948 - val_loss: 0.1456 - val_accuracy: 0.8117 Epoch 88/100 20/20 [==============================] - 0s 4ms/step - loss: 0.1391 - accuracy: 0.8046 - val_loss: 0.1440 - val_accuracy: 0.8247 Epoch 89/100 20/20 [==============================] - 0s 3ms/step - loss: 0.1386 - accuracy: 0.8046 - val_loss: 0.1442 - val_accuracy: 0.8247 Epoch 90/100 20/20 [==============================] - 0s 3ms/step - loss: 0.1384 - accuracy: 0.8078 - val_loss: 0.1440 - val_accuracy: 0.8247 Epoch 91/100 20/20 [==============================] - 0s 3ms/step - loss: 0.1377 - accuracy: 0.8013 - val_loss: 0.1443 - val_accuracy: 0.8312 Epoch 92/100 20/20 [==============================] - 0s 4ms/step - loss: 0.1375 - accuracy: 0.8013 - val_loss: 0.1447 - val_accuracy: 0.8247 Epoch 93/100 20/20 [==============================] - 0s 4ms/step - loss: 0.1376 - accuracy: 0.8046 - val_loss: 0.1446 - val_accuracy: 0.8312 Epoch 94/100 20/20 [==============================] - 0s 4ms/step - loss: 0.1373 - accuracy: 0.7997 - val_loss: 0.1449 - val_accuracy: 0.8247 Epoch 95/100 20/20 [==============================] - 0s 4ms/step - loss: 0.1375 - accuracy: 0.8062 - val_loss: 0.1452 - val_accuracy: 0.8182 Epoch 96/100 20/20 [==============================] - 0s 3ms/step - loss: 0.1372 - accuracy: 0.8013 - val_loss: 0.1443 - val_accuracy: 0.8247 Epoch 97/100 20/20 [==============================] - 0s 3ms/step - loss: 0.1368 - accuracy: 0.8078 - val_loss: 0.1439 - val_accuracy: 0.8247 Epoch 98/100 20/20 [==============================] - 0s 4ms/step - loss: 0.1371 - accuracy: 0.8029 - val_loss: 0.1449 - val_accuracy: 0.8247 Epoch 99/100 20/20 [==============================] - 0s 3ms/step - loss: 0.1368 - accuracy: 0.8094 - val_loss: 0.1443 - val_accuracy: 0.8247 Epoch 100/100 20/20 [==============================] - 0s 4ms/step - loss: 0.1366 - accuracy: 0.8062 - val_loss: 0.1442 - val_accuracy: 0.8182
# Train and Test accuracy
scores = model.evaluate(x_train,y_train)
print("Training Accuracy: %.2f%%\n" % (scores[1]*100))
scores = model.evaluate(x_test,y_test)
print("Testing Accuracy: %.2f%%\n" % (scores[1]*100))
20/20 [==============================] - 0s 2ms/step - loss: 0.1360 - accuracy: 0.8078 Training Accuracy: 80.78% 5/5 [==============================] - 0s 3ms/step - loss: 0.1442 - accuracy: 0.8182 Testing Accuracy: 81.82%
y_pred = model.predict(x_test)
y_pred = (y_pred > 0.5)
print(np.concatenate((y_pred.reshape(len(y_pred),1), y_test.reshape(len(y_test),1)),1))
[[0 0] [1 1] [1 1] [0 0] [0 0] [0 0] [0 1] [1 0] [0 0] [0 0] [0 0] [1 1] [0 0] [0 0] [0 0] [0 0] [1 1] [0 1] [0 1] [0 0] [0 1] [1 1] [0 0] [0 0] [1 1] [1 1] [1 0] [1 1] [1 1] [0 0] [1 1] [1 1] [0 0] [1 1] [0 0] [0 0] [0 0] [1 1] [0 0] [0 0] [0 0] [0 0] [0 0] [1 1] [1 1] [0 0] [0 0] [1 1] [0 0] [0 0] [0 0] [0 0] [0 0] [1 1] [1 1] [0 0] [0 1] [1 0] [0 0] [0 0] [0 0] [1 1] [0 0] [0 0] [0 1] [0 0] [1 0] [0 0] [0 0] [0 0] [0 0] [0 0] [1 1] [0 0] [0 0] [1 1] [0 1] [0 0] [0 0] [1 0] [0 0] [0 0] [1 0] [0 1] [0 0] [0 0] [1 1] [0 0] [0 0] [0 0] [0 0] [0 0] [0 0] [0 0] [1 1] [1 1] [1 1] [0 0] [0 0] [0 0] [0 1] [0 0] [0 0] [1 1] [0 1] [0 0] [1 1] [0 1] [1 1] [0 0] [1 1] [0 1] [0 0] [0 0] [1 1] [0 0] [0 0] [0 0] [0 0] [1 1] [1 1] [0 0] [0 0] [0 1] [0 0] [0 0] [0 1] [0 0] [0 0] [0 1] [1 0] [0 1] [1 1] [0 0] [0 0] [0 1] [0 1] [0 0] [1 0] [0 0] [0 0] [0 0] [0 0] [0 0] [1 1] [0 0] [0 0] [1 0] [0 0] [1 1] [0 0] [1 1] [0 1] [0 0]]
from sklearn.metrics import confusion_matrix, accuracy_score
cm = confusion_matrix(y_test, y_pred)
print(cm)
accuracy_score(y_test, y_pred)
[[90 9] [19 36]]
0.8181818181818182
ax = sns.heatmap(cm, annot=True, xticklabels=["No Diabetes",'Diabetes'], yticklabels=['No Diabetes','Diabetes'],
cbar=False,cmap='Blues')
ax.set_xlabel('Prediction')
ax.set_ylabel('Actual')
plt.show()
acc = hist.history['accuracy']
loss = hist.history['loss']
from matplotlib import pyplot as plt
plt.plot(hist.history['accuracy'])
plt.title('model accuracy')
plt.ylabel('accuracy')
plt.xlabel('epoch')
plt.legend(['train'], loc='upper left')
plt.show()
plt.savefig('accuracy_curve.png')
<Figure size 432x288 with 0 Axes>
plt.plot(hist.history['loss'])
plt.title('model loss')
plt.ylabel('loss')
plt.xlabel('epoch')
plt.legend(['train'], loc='upper left')
plt.show()
plt.savefig('loss_curve.png')
<Figure size 432x288 with 0 Axes>