Required Packages

import sys
import datetime
import numpy as np
import matplotlib as mpl
import matplotlib.pyplot as plt
import seaborn as sns
import pandas as pd
import sklearn
import tensorflow as tf

plt.rcParams['figure.figsize'] = (8, 8)

Version Check

print('Python: {}'.format(sys.version))
print('Numpy: {}'.format(np.__version__))
print('Matplotlib: {}'.format(mpl.__version__))
print('Seaborn: {}'.format(sns.__version__))
print('Pandas: {}'.format(pd.__version__))
print('Scikit-learn: {}'.format(sklearn.__version__))
print('Tensorflow: {}'.format(tf.__version__))
Python: 3.7.6 (default, Jan  8 2020, 20:23:39) [MSC v.1916 64 bit (AMD64)]
Numpy: 1.18.1
Matplotlib: 3.1.3
Seaborn: 0.10.0
Pandas: 1.0.1
Scikit-learn: 0.22.1
Tensorflow: 2.1.0

Prepare Dataset

The PIMA Indian diabetes dataset is currently available on Kaggle; the downloaded CSV is loaded with pandas below.

df = pd.read_csv('./dataset/datasets_228_482_diabetes.csv')
df.head()
Pregnancies Glucose BloodPressure SkinThickness Insulin BMI DiabetesPedigreeFunction Age Outcome
0 6 148 72 35 0 33.6 0.627 50 1
1 1 85 66 29 0 26.6 0.351 31 0
2 8 183 64 0 0 23.3 0.672 32 1
3 1 89 66 23 94 28.1 0.167 21 0
4 0 137 40 35 168 43.1 2.288 33 1
df.describe()
Pregnancies Glucose BloodPressure SkinThickness Insulin BMI DiabetesPedigreeFunction Age Outcome
count 768.000000 768.000000 768.000000 768.000000 768.000000 768.000000 768.000000 768.000000 768.000000
mean 3.845052 120.894531 69.105469 20.536458 79.799479 31.992578 0.471876 33.240885 0.348958
std 3.369578 31.972618 19.355807 15.952218 115.244002 7.884160 0.331329 11.760232 0.476951
min 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.078000 21.000000 0.000000
25% 1.000000 99.000000 62.000000 0.000000 0.000000 27.300000 0.243750 24.000000 0.000000
50% 3.000000 117.000000 72.000000 23.000000 30.500000 32.000000 0.372500 29.000000 0.000000
75% 6.000000 140.250000 80.000000 32.000000 127.250000 36.600000 0.626250 41.000000 1.000000
max 17.000000 199.000000 122.000000 99.000000 846.000000 67.100000 2.420000 81.000000 1.000000
df[df['Glucose'] == 0]
Pregnancies Glucose BloodPressure SkinThickness Insulin BMI DiabetesPedigreeFunction Age Outcome
75 1 0 48 20 0 24.7 0.140 22 0
182 1 0 74 20 23 27.7 0.299 21 0
342 1 0 68 35 0 32.0 0.389 22 0
349 5 0 80 32 0 41.0 0.346 37 1
502 6 0 68 41 0 39.0 0.727 41 1
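Glucose is not the only feature with physiologically impossible zeros. A quick check (not in the original notebook) counts the zero placeholders in each suspect column:

# Count the zero placeholders in each of the suspect columns
suspect_cols = ['Glucose', 'BloodPressure', 'SkinThickness', 'Insulin', 'BMI']
print((df[suspect_cols] == 0).sum())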

Preprocess Dataset

columns = ['Glucose', 'BloodPressure', 'SkinThickness', 'Insulin', 'BMI']

for col in columns:
    df[col].replace(0, np.nan, inplace=True)
    
df.describe()
Pregnancies Glucose BloodPressure SkinThickness Insulin BMI DiabetesPedigreeFunction Age Outcome
count 768.000000 763.000000 733.000000 541.000000 394.000000 757.000000 768.000000 768.000000 768.000000
mean 3.845052 121.686763 72.405184 29.153420 155.548223 32.457464 0.471876 33.240885 0.348958
std 3.369578 30.535641 12.382158 10.476982 118.775855 6.924988 0.331329 11.760232 0.476951
min 0.000000 44.000000 24.000000 7.000000 14.000000 18.200000 0.078000 21.000000 0.000000
25% 1.000000 99.000000 64.000000 22.000000 76.250000 27.500000 0.243750 24.000000 0.000000
50% 3.000000 117.000000 72.000000 29.000000 125.000000 32.300000 0.372500 29.000000 0.000000
75% 6.000000 141.000000 80.000000 36.000000 190.000000 36.600000 0.626250 41.000000 1.000000
max 17.000000 199.000000 122.000000 99.000000 846.000000 67.100000 2.420000 81.000000 1.000000
df.dropna(inplace=True)

# Summarize the dataframe after dropping rows with missing values
df.describe()
Pregnancies Glucose BloodPressure SkinThickness Insulin BMI DiabetesPedigreeFunction Age Outcome
count 392.000000 392.000000 392.000000 392.000000 392.000000 392.000000 392.000000 392.000000 392.000000
mean 3.301020 122.627551 70.663265 29.145408 156.056122 33.086224 0.523046 30.864796 0.331633
std 3.211424 30.860781 12.496092 10.516424 118.841690 7.027659 0.345488 10.200777 0.471401
min 0.000000 56.000000 24.000000 7.000000 14.000000 18.200000 0.085000 21.000000 0.000000
25% 1.000000 99.000000 62.000000 21.000000 76.750000 28.400000 0.269750 23.000000 0.000000
50% 2.000000 119.000000 70.000000 29.000000 125.500000 33.200000 0.449500 27.000000 0.000000
75% 5.000000 143.000000 78.000000 37.000000 190.000000 37.100000 0.687000 36.000000 1.000000
max 17.000000 198.000000 110.000000 63.000000 846.000000 67.100000 2.420000 81.000000 1.000000
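Dropping every row with a missing value shrinks the dataset from 768 to 392 samples. An alternative, sketched here but not used in this notebook, is to impute the missing values with the column medians via scikit-learn's SimpleImputer:

# Hypothetical alternative to dropna(): median imputation keeps all 768 rows
from sklearn.impute import SimpleImputer

df_alt = pd.read_csv('./dataset/datasets_228_482_diabetes.csv')
cols = ['Glucose', 'BloodPressure', 'SkinThickness', 'Insulin', 'BMI']
df_alt[cols] = df_alt[cols].replace(0, np.nan)
df_alt[cols] = SimpleImputer(strategy='median').fit_transform(df_alt[cols])
print(df_alt.describe())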
dataset = df.values
print(dataset)
print(dataset.shape)
[[  1.     89.     66.    ...   0.167  21.      0.   ]
 [  0.    137.     40.    ...   2.288  33.      1.   ]
 [  3.     78.     50.    ...   0.248  26.      1.   ]
 ...
 [  2.     88.     58.    ...   0.766  22.      0.   ]
 [ 10.    101.     76.    ...   0.171  63.      0.   ]
 [  5.    121.     72.    ...   0.245  30.      0.   ]]
(392, 9)
X = dataset[:, :-1]
y = dataset[:, -1].astype(int)
print(X.shape, y.shape)
(392, 8) (392,)
from sklearn.preprocessing import StandardScaler

scaler = StandardScaler().fit(X)
X_standard = scaler.transform(X)
data = pd.DataFrame(X_standard)
data.describe()
0 1 2 3 4 5 6 7
count 3.920000e+02 3.920000e+02 3.920000e+02 3.920000e+02 3.920000e+02 3.920000e+02 3.920000e+02 3.920000e+02
mean -4.021726e-17 3.129583e-17 -4.641624e-16 1.042250e-16 6.485742e-17 1.543550e-16 3.880116e-17 1.028089e-16
std 1.001278e+00 1.001278e+00 1.001278e+00 1.001278e+00 1.001278e+00 1.001278e+00 1.001278e+00 1.001278e+00
min -1.029213e+00 -2.161731e+00 -3.739001e+00 -2.108484e+00 -1.196867e+00 -2.120941e+00 -1.269525e+00 -9.682991e-01
25% -7.174265e-01 -7.665958e-01 -6.941640e-01 -7.755315e-01 -6.681786e-01 -6.676780e-01 -7.340909e-01 -7.719850e-01
50% -4.056403e-01 -1.176959e-01 -5.314565e-02 -1.384444e-02 -2.574448e-01 1.621036e-02 -2.131475e-01 -3.793569e-01
75% 5.297185e-01 6.609841e-01 5.878727e-01 7.478426e-01 2.859877e-01 5.718696e-01 4.751644e-01 5.040564e-01
max 4.271153e+00 2.445459e+00 3.151946e+00 3.223325e+00 5.812990e+00 4.846172e+00 5.497667e+00 4.921123e+00
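Since seaborn and matplotlib are already imported (and the 8 x 8 figure size set), an optional correlation heatmap of the cleaned dataframe is one quick way to eyeball how the features relate to the outcome:

# Optional EDA sketch: feature/outcome correlations on the cleaned dataframe
sns.heatmap(df.corr(), annot=True, fmt='.2f', cmap='coolwarm', square=True)
plt.title('Feature correlations')
plt.show()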

Build Neural Network

from sklearn.model_selection import GridSearchCV, KFold
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Dense
from tensorflow.keras.wrappers.scikit_learn import KerasClassifier
from tensorflow.keras.optimizers import Adam
def create_model():
    # Create model
    model = Sequential()
    model.add(Dense(8, input_shape=(8, ), kernel_initializer='normal', activation='relu'))
    model.add(Dense(4, kernel_initializer='normal', activation='relu'))
    model.add(Dense(1, activation='sigmoid'))
    
    # Compile model
    model.compile(optimizer=Adam(lr=0.01), loss='binary_crossentropy', metrics=['accuracy'])
    return model

model = create_model()
print(model.summary())
Model: "sequential_1"
_________________________________________________________________
Layer (type)                 Output Shape              Param #   
=================================================================
dense_3 (Dense)              (None, 8)                 72        
_________________________________________________________________
dense_4 (Dense)              (None, 4)                 36        
_________________________________________________________________
dense_5 (Dense)              (None, 1)                 5         
=================================================================
Total params: 113
Trainable params: 113
Non-trainable params: 0
_________________________________________________________________
None
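The parameter counts in the summary follow from weights plus biases per layer: the first Dense layer has 8 x 8 weights + 8 biases = 72 parameters, the second 8 x 4 + 4 = 36, and the sigmoid output 4 x 1 + 1 = 5, for 113 in total.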
seed = 6
np.random.seed(seed)

# Create model with KerasClassifier
model = KerasClassifier(build_fn=create_model, verbose=False)
batch_size = [10, 20, 40]
epochs = [10, 50, 100]

# Make a dictionary of the grid search parameters
param_grid = {
    'batch_size':batch_size,
    'epochs':epochs
}

# Build and fit the GridSearchCV
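# NOTE: KFold defaults to shuffle=False, so random_state has no effect here
# (hence the FutureWarning in the output below); pass shuffle=True to make it count.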
grid = GridSearchCV(estimator=model, param_grid=param_grid, cv=KFold(random_state=seed), verbose=10)
grid_results = grid.fit(X_standard, y)

# Summarize the results
print('Best: {0}, using {1} '.format(grid_results.best_score_, grid_results.best_params_))
means = grid_results.cv_results_['mean_test_score']
stds = grid_results.cv_results_['std_test_score']
params = grid_results.cv_results_['params']

for mean, stdev, param in zip(means, stds, params):
    print('{0} ({1}) with: {2}'.format(mean, stdev, param))
C:\Users\kcsgo\anaconda3\lib\site-packages\sklearn\model_selection\_split.py:296: FutureWarning: Setting a random_state has no effect since shuffle is False. This will raise an error in 0.24. You should leave random_state to its default (None), or set shuffle=True.
  FutureWarning
[Parallel(n_jobs=1)]: Using backend SequentialBackend with 1 concurrent workers.
Fitting 5 folds for each of 9 candidates, totalling 45 fits
[CV] batch_size=10, epochs=10 ........................................
[CV] ............ batch_size=10, epochs=10, score=0.759, total=   1.5s
[CV] batch_size=10, epochs=10 ........................................
[Parallel(n_jobs=1)]: Done   1 out of   1 | elapsed:    1.4s remaining:    0.0s
[CV] ............ batch_size=10, epochs=10, score=0.633, total=   0.9s
[CV] batch_size=10, epochs=10 ........................................
[Parallel(n_jobs=1)]: Done   2 out of   2 | elapsed:    2.3s remaining:    0.0s
[CV] ............ batch_size=10, epochs=10, score=0.833, total=   0.9s
[CV] batch_size=10, epochs=10 ........................................
[Parallel(n_jobs=1)]: Done   3 out of   3 | elapsed:    3.2s remaining:    0.0s
[CV] ............ batch_size=10, epochs=10, score=0.859, total=   0.9s
[CV] batch_size=10, epochs=10 ........................................
[Parallel(n_jobs=1)]: Done   4 out of   4 | elapsed:    4.1s remaining:    0.0s
[CV] ............ batch_size=10, epochs=10, score=0.808, total=   0.9s
[CV] batch_size=10, epochs=50 ........................................
[Parallel(n_jobs=1)]: Done   5 out of   5 | elapsed:    4.9s remaining:    0.0s
[CV] ............ batch_size=10, epochs=50, score=0.709, total=   3.2s
[CV] batch_size=10, epochs=50 ........................................
[Parallel(n_jobs=1)]: Done   6 out of   6 | elapsed:    8.2s remaining:    0.0s
[CV] ............ batch_size=10, epochs=50, score=0.646, total=   3.2s
[CV] batch_size=10, epochs=50 ........................................
[Parallel(n_jobs=1)]: Done   7 out of   7 | elapsed:   11.4s remaining:    0.0s
[CV] ............ batch_size=10, epochs=50, score=0.846, total=   3.2s
[CV] batch_size=10, epochs=50 ........................................
[Parallel(n_jobs=1)]: Done   8 out of   8 | elapsed:   14.6s remaining:    0.0s
[CV] ............ batch_size=10, epochs=50, score=0.808, total=   3.2s
[CV] batch_size=10, epochs=50 ........................................
[Parallel(n_jobs=1)]: Done   9 out of   9 | elapsed:   17.9s remaining:    0.0s
[CV] ............ batch_size=10, epochs=50, score=0.795, total=   3.2s
[CV] batch_size=10, epochs=100 .......................................
[CV] ........... batch_size=10, epochs=100, score=0.772, total=   6.1s
[CV] batch_size=10, epochs=100 .......................................
[CV] ........... batch_size=10, epochs=100, score=0.646, total=   6.1s
[CV] batch_size=10, epochs=100 .......................................
[CV] ........... batch_size=10, epochs=100, score=0.756, total=   6.1s
[CV] batch_size=10, epochs=100 .......................................
[CV] ........... batch_size=10, epochs=100, score=0.833, total=   6.1s
[CV] batch_size=10, epochs=100 .......................................
[CV] ........... batch_size=10, epochs=100, score=0.846, total=   6.1s
[CV] batch_size=20, epochs=10 ........................................
[CV] ............ batch_size=20, epochs=10, score=0.759, total=   0.6s
[CV] batch_size=20, epochs=10 ........................................
[CV] ............ batch_size=20, epochs=10, score=0.608, total=   1.0s
[CV] batch_size=20, epochs=10 ........................................
[CV] ............ batch_size=20, epochs=10, score=0.808, total=   0.6s
[CV] batch_size=20, epochs=10 ........................................
[CV] ............ batch_size=20, epochs=10, score=0.808, total=   0.6s
[CV] batch_size=20, epochs=10 ........................................
[CV] ............ batch_size=20, epochs=10, score=0.846, total=   0.6s
[CV] batch_size=20, epochs=50 ........................................
[CV] ............ batch_size=20, epochs=50, score=0.785, total=   1.8s
[CV] batch_size=20, epochs=50 ........................................
[CV] ............ batch_size=20, epochs=50, score=0.658, total=   1.8s
[CV] batch_size=20, epochs=50 ........................................
[CV] ............ batch_size=20, epochs=50, score=0.808, total=   1.8s
[CV] batch_size=20, epochs=50 ........................................
[CV] ............ batch_size=20, epochs=50, score=0.808, total=   1.8s
[CV] batch_size=20, epochs=50 ........................................
[CV] ............ batch_size=20, epochs=50, score=0.872, total=   1.8s
[CV] batch_size=20, epochs=100 .......................................
[CV] ........... batch_size=20, epochs=100, score=0.772, total=   3.2s
[CV] batch_size=20, epochs=100 .......................................
[CV] ........... batch_size=20, epochs=100, score=0.684, total=   3.2s
[CV] batch_size=20, epochs=100 .......................................
[CV] ........... batch_size=20, epochs=100, score=0.795, total=   3.2s
[CV] batch_size=20, epochs=100 .......................................
[CV] ........... batch_size=20, epochs=100, score=0.769, total=   3.2s
[CV] batch_size=20, epochs=100 .......................................
[CV] ........... batch_size=20, epochs=100, score=0.859, total=   3.2s
[CV] batch_size=40, epochs=10 ........................................
[CV] ............ batch_size=40, epochs=10, score=0.797, total=   0.5s
[CV] batch_size=40, epochs=10 ........................................
[CV] ............ batch_size=40, epochs=10, score=0.608, total=   0.5s
[CV] batch_size=40, epochs=10 ........................................
[CV] ............ batch_size=40, epochs=10, score=0.808, total=   0.4s
[CV] batch_size=40, epochs=10 ........................................
[CV] ............ batch_size=40, epochs=10, score=0.821, total=   0.5s
[CV] batch_size=40, epochs=10 ........................................
[CV] ............ batch_size=40, epochs=10, score=0.833, total=   0.4s
[CV] batch_size=40, epochs=50 ........................................
[CV] ............ batch_size=40, epochs=50, score=0.747, total=   1.5s
[CV] batch_size=40, epochs=50 ........................................
[CV] ............ batch_size=40, epochs=50, score=0.671, total=   1.0s
[CV] batch_size=40, epochs=50 ........................................
[CV] ............ batch_size=40, epochs=50, score=0.821, total=   1.0s
[CV] batch_size=40, epochs=50 ........................................
[CV] ............ batch_size=40, epochs=50, score=0.821, total=   1.0s
[CV] batch_size=40, epochs=50 ........................................
[CV] ............ batch_size=40, epochs=50, score=0.833, total=   1.0s
[CV] batch_size=40, epochs=100 .......................................
[CV] ........... batch_size=40, epochs=100, score=0.696, total=   1.8s
[CV] batch_size=40, epochs=100 .......................................
[CV] ........... batch_size=40, epochs=100, score=0.696, total=   1.8s
[CV] batch_size=40, epochs=100 .......................................
[CV] ........... batch_size=40, epochs=100, score=0.756, total=   1.8s
[CV] batch_size=40, epochs=100 .......................................
[CV] ........... batch_size=40, epochs=100, score=0.821, total=   1.8s
[CV] batch_size=40, epochs=100 .......................................
[CV] ........... batch_size=40, epochs=100, score=0.808, total=   1.8s
[Parallel(n_jobs=1)]: Done  45 out of  45 | elapsed:  1.6min finished
Best: 0.7860434889793396, using {'batch_size': 20, 'epochs': 50} 
0.7784809947013855 (0.07986524360686026) with: {'batch_size': 10, 'epochs': 10}
0.760629665851593 (0.07296661628343429) with: {'batch_size': 10, 'epochs': 50}
0.7707237839698792 (0.07138645524930724) with: {'batch_size': 10, 'epochs': 100}
0.7657253980636597 (0.08370813995870807) with: {'batch_size': 20, 'epochs': 10}
0.7860434889793396 (0.07018404097720617) with: {'batch_size': 20, 'epochs': 50}
0.7757546186447144 (0.05630146625584115) with: {'batch_size': 20, 'epochs': 100}
0.7733203411102295 (0.08373549454888415) with: {'batch_size': 40, 'epochs': 10}
0.778416109085083 (0.06183883425960871) with: {'batch_size': 40, 'epochs': 50}
0.7554040789604187 (0.052884532540700184) with: {'batch_size': 40, 'epochs': 100}
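A sketch (not part of the original run) of turning these numbers into a picture: pivot cv_results_ into a batch_size x epochs grid and plot it with the already-imported seaborn.

# Visualize mean CV accuracy for each (batch_size, epochs) combination
results = pd.DataFrame(grid_results.cv_results_['params'])
results['mean_test_score'] = grid_results.cv_results_['mean_test_score']
pivot = results.pivot(index='batch_size', columns='epochs', values='mean_test_score')
sns.heatmap(pivot, annot=True, fmt='.3f', cmap='viridis')
plt.title('Mean CV accuracy')
plt.show()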

Applying Dropout and Optimizing the Learning Rate

from tensorflow.keras.layers import Dropout

def create_model(learning_rate, dropout_rate):
    # Create model
    model = Sequential()
    model.add(Dense(8, input_shape=(8, ), kernel_initializer='normal', activation='relu'))
    model.add(Dropout(dropout_rate))
    model.add(Dense(4, kernel_initializer='normal', activation='relu'))
    model.add(Dropout(dropout_rate))
    model.add(Dense(1, activation='sigmoid'))
    
    # Compile model
    model.compile(optimizer=Adam(lr=learning_rate), loss='binary_crossentropy', metrics=['accuracy'])
    return model

# Create model with KerasClassifier
model = KerasClassifier(build_fn=create_model, epochs=50, batch_size=20, verbose=False)

# Define Grid Search parameters
learning_rates = [0.001, 0.01, 0.1]
dropout_rates = [0.0, 0.1, 0.2]

# Make a dictionary of the grid search parameters
param_grid = {
    'learning_rate':learning_rates,
    'dropout_rate':dropout_rates
}

# Build and fit the GridSearchCV
grid = GridSearchCV(estimator=model, param_grid=param_grid, cv=KFold(random_state=seed), verbose=10)
grid_results = grid.fit(X_standard, y)

# Summarize the results
print('Best: {0}, using {1} '.format(grid_results.best_score_, grid_results.best_params_))
means = grid_results.cv_results_['mean_test_score']
stds = grid_results.cv_results_['std_test_score']
params = grid_results.cv_results_['params']

for mean, stdev, param in zip(means, stds, params):
    print('{0} ({1}) with: {2}'.format(mean, stdev, param))
Fitting 5 folds for each of 9 candidates, totalling 45 fits
[CV] dropout_rate=0.0, learning_rate=0.001 ...........................
[Parallel(n_jobs=1)]: Using backend SequentialBackend with 1 concurrent workers.
[CV]  dropout_rate=0.0, learning_rate=0.001, score=0.785, total=   1.8s
[CV] dropout_rate=0.0, learning_rate=0.001 ...........................
[Parallel(n_jobs=1)]: Done   1 out of   1 | elapsed:    1.7s remaining:    0.0s
[CV]  dropout_rate=0.0, learning_rate=0.001, score=0.595, total=   1.7s
[CV] dropout_rate=0.0, learning_rate=0.001 ...........................
[Parallel(n_jobs=1)]: Done   2 out of   2 | elapsed:    3.4s remaining:    0.0s
[CV]  dropout_rate=0.0, learning_rate=0.001, score=0.821, total=   1.8s
[CV] dropout_rate=0.0, learning_rate=0.001 ...........................
[Parallel(n_jobs=1)]: Done   3 out of   3 | elapsed:    5.2s remaining:    0.0s
[CV]  dropout_rate=0.0, learning_rate=0.001, score=0.821, total=   1.7s
[CV] dropout_rate=0.0, learning_rate=0.001 ...........................
[Parallel(n_jobs=1)]: Done   4 out of   4 | elapsed:    6.9s remaining:    0.0s
[CV]  dropout_rate=0.0, learning_rate=0.001, score=0.859, total=   2.1s
[CV] dropout_rate=0.0, learning_rate=0.01 ............................
[Parallel(n_jobs=1)]: Done   5 out of   5 | elapsed:    9.0s remaining:    0.0s
[CV]  dropout_rate=0.0, learning_rate=0.01, score=0.747, total=   1.7s
[CV] dropout_rate=0.0, learning_rate=0.01 ............................
[Parallel(n_jobs=1)]: Done   6 out of   6 | elapsed:   10.8s remaining:    0.0s
[CV]  dropout_rate=0.0, learning_rate=0.01, score=0.633, total=   1.7s
[CV] dropout_rate=0.0, learning_rate=0.01 ............................
[Parallel(n_jobs=1)]: Done   7 out of   7 | elapsed:   12.5s remaining:    0.0s
[CV]  dropout_rate=0.0, learning_rate=0.01, score=0.833, total=   1.7s
[CV] dropout_rate=0.0, learning_rate=0.01 ............................
[Parallel(n_jobs=1)]: Done   8 out of   8 | elapsed:   14.2s remaining:    0.0s
[CV]  dropout_rate=0.0, learning_rate=0.01, score=0.821, total=   1.7s
[CV] dropout_rate=0.0, learning_rate=0.01 ............................
[Parallel(n_jobs=1)]: Done   9 out of   9 | elapsed:   16.0s remaining:    0.0s
[CV]  dropout_rate=0.0, learning_rate=0.01, score=0.795, total=   1.7s
[CV] dropout_rate=0.0, learning_rate=0.1 .............................
[CV] . dropout_rate=0.0, learning_rate=0.1, score=0.722, total=   1.7s
[CV] dropout_rate=0.0, learning_rate=0.1 .............................
[CV] . dropout_rate=0.0, learning_rate=0.1, score=0.722, total=   1.8s
[CV] dropout_rate=0.0, learning_rate=0.1 .............................
[CV] . dropout_rate=0.0, learning_rate=0.1, score=0.833, total=   1.8s
[CV] dropout_rate=0.0, learning_rate=0.1 .............................
[CV] . dropout_rate=0.0, learning_rate=0.1, score=0.756, total=   1.7s
[CV] dropout_rate=0.0, learning_rate=0.1 .............................
[CV] . dropout_rate=0.0, learning_rate=0.1, score=0.885, total=   1.7s
[CV] dropout_rate=0.1, learning_rate=0.001 ...........................
[CV]  dropout_rate=0.1, learning_rate=0.001, score=0.734, total=   1.8s
[CV] dropout_rate=0.1, learning_rate=0.001 ...........................
[CV]  dropout_rate=0.1, learning_rate=0.001, score=0.620, total=   1.8s
[CV] dropout_rate=0.1, learning_rate=0.001 ...........................
[CV]  dropout_rate=0.1, learning_rate=0.001, score=0.808, total=   1.8s
[CV] dropout_rate=0.1, learning_rate=0.001 ...........................
[CV]  dropout_rate=0.1, learning_rate=0.001, score=0.821, total=   1.8s
[CV] dropout_rate=0.1, learning_rate=0.001 ...........................
[CV]  dropout_rate=0.1, learning_rate=0.001, score=0.859, total=   1.8s
[CV] dropout_rate=0.1, learning_rate=0.01 ............................
[CV]  dropout_rate=0.1, learning_rate=0.01, score=0.709, total=   1.8s
[CV] dropout_rate=0.1, learning_rate=0.01 ............................
[CV]  dropout_rate=0.1, learning_rate=0.01, score=0.658, total=   1.8s
[CV] dropout_rate=0.1, learning_rate=0.01 ............................
[CV]  dropout_rate=0.1, learning_rate=0.01, score=0.795, total=   2.2s
[CV] dropout_rate=0.1, learning_rate=0.01 ............................
[CV]  dropout_rate=0.1, learning_rate=0.01, score=0.795, total=   1.8s
[CV] dropout_rate=0.1, learning_rate=0.01 ............................
[CV]  dropout_rate=0.1, learning_rate=0.01, score=0.821, total=   1.8s
[CV] dropout_rate=0.1, learning_rate=0.1 .............................
[CV] . dropout_rate=0.1, learning_rate=0.1, score=0.709, total=   1.8s
[CV] dropout_rate=0.1, learning_rate=0.1 .............................
[CV] . dropout_rate=0.1, learning_rate=0.1, score=0.658, total=   1.8s
[CV] dropout_rate=0.1, learning_rate=0.1 .............................
[CV] . dropout_rate=0.1, learning_rate=0.1, score=0.756, total=   1.8s
[CV] dropout_rate=0.1, learning_rate=0.1 .............................
[CV] . dropout_rate=0.1, learning_rate=0.1, score=0.769, total=   1.8s
[CV] dropout_rate=0.1, learning_rate=0.1 .............................
[CV] . dropout_rate=0.1, learning_rate=0.1, score=0.821, total=   1.8s
[CV] dropout_rate=0.2, learning_rate=0.001 ...........................
[CV]  dropout_rate=0.2, learning_rate=0.001, score=0.772, total=   1.8s
[CV] dropout_rate=0.2, learning_rate=0.001 ...........................
[CV]  dropout_rate=0.2, learning_rate=0.001, score=0.608, total=   1.8s
[CV] dropout_rate=0.2, learning_rate=0.001 ...........................
[CV]  dropout_rate=0.2, learning_rate=0.001, score=0.833, total=   1.8s
[CV] dropout_rate=0.2, learning_rate=0.001 ...........................
[CV]  dropout_rate=0.2, learning_rate=0.001, score=0.833, total=   1.8s
[CV] dropout_rate=0.2, learning_rate=0.001 ...........................
[CV]  dropout_rate=0.2, learning_rate=0.001, score=0.821, total=   1.8s
[CV] dropout_rate=0.2, learning_rate=0.01 ............................
[CV]  dropout_rate=0.2, learning_rate=0.01, score=0.747, total=   1.8s
[CV] dropout_rate=0.2, learning_rate=0.01 ............................
[CV]  dropout_rate=0.2, learning_rate=0.01, score=0.620, total=   1.8s
[CV] dropout_rate=0.2, learning_rate=0.01 ............................
[CV]  dropout_rate=0.2, learning_rate=0.01, score=0.795, total=   1.8s
[CV] dropout_rate=0.2, learning_rate=0.01 ............................
[CV]  dropout_rate=0.2, learning_rate=0.01, score=0.808, total=   1.8s
[CV] dropout_rate=0.2, learning_rate=0.01 ............................
[CV]  dropout_rate=0.2, learning_rate=0.01, score=0.833, total=   1.9s
[CV] dropout_rate=0.2, learning_rate=0.1 .............................
[CV] . dropout_rate=0.2, learning_rate=0.1, score=0.759, total=   1.9s
[CV] dropout_rate=0.2, learning_rate=0.1 .............................
[CV] . dropout_rate=0.2, learning_rate=0.1, score=0.608, total=   2.3s
[CV] dropout_rate=0.2, learning_rate=0.1 .............................
[CV] . dropout_rate=0.2, learning_rate=0.1, score=0.782, total=   1.8s
[CV] dropout_rate=0.2, learning_rate=0.1 .............................
[CV] . dropout_rate=0.2, learning_rate=0.1, score=0.756, total=   1.8s
[CV] dropout_rate=0.2, learning_rate=0.1 .............................
[CV] . dropout_rate=0.2, learning_rate=0.1, score=0.808, total=   1.8s
[Parallel(n_jobs=1)]: Done  45 out of  45 | elapsed:  1.4min finished
Best: 0.783479380607605, using {'dropout_rate': 0.0, 'learning_rate': 0.1} 
0.7759493708610534 (0.0934976350249817) with: {'dropout_rate': 0.0, 'learning_rate': 0.001}
0.7656929612159729 (0.0726889161193493) with: {'dropout_rate': 0.0, 'learning_rate': 0.01}
0.783479380607605 (0.06499972119101476) with: {'dropout_rate': 0.0, 'learning_rate': 0.1}
0.7683219671249389 (0.08435875729999517) with: {'dropout_rate': 0.1, 'learning_rate': 0.001}
0.7554690122604371 (0.06158576138404698) with: {'dropout_rate': 0.1, 'learning_rate': 0.01}
0.7426484942436218 (0.055185710792480795) with: {'dropout_rate': 0.1, 'learning_rate': 0.1}
0.7733852505683899 (0.08590286195286612) with: {'dropout_rate': 0.2, 'learning_rate': 0.001}
0.7605972051620483 (0.07558978386106281) with: {'dropout_rate': 0.2, 'learning_rate': 0.01}
0.742648470401764 (0.06999412819842643) with: {'dropout_rate': 0.2, 'learning_rate': 0.1}
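In this run the best configuration keeps dropout at 0.0 with a learning rate of 0.1; with a network this small and only 392 training samples, dropout appears to cost more accuracy than its regularization gains back.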

Weight Initialization and Activation Function

def create_model(activation, initializer):
    # Create model
    model = Sequential()
    model.add(Dense(8, input_shape=(8, ), kernel_initializer=initializer, activation=activation))
    model.add(Dense(4, kernel_initializer=initializer, activation=activation))
    model.add(Dense(1, activation='sigmoid'))
    
    # Compile model
    model.compile(optimizer=Adam(lr=0.1), loss='binary_crossentropy', metrics=['accuracy'])
    return model

# Create model with KerasClassifier
model = KerasClassifier(build_fn=create_model, epochs=50, batch_size=20, verbose=False)

# Define Grid Search parameters
activations = ['softmax', 'relu', 'tanh', 'linear']
initializers = ['normal', 'uniform', 'zero']

# Make a dictionary of the grid search parameters
param_grid = {
    'activation':activations,
    'initializer':initializers
}

# Build and fit the GridSearchCV
grid = GridSearchCV(estimator=model, param_grid=param_grid, cv=KFold(random_state=seed), verbose=10)
grid_results = grid.fit(X_standard, y)

# Summarize the results
print('Best: {0}, using {1} '.format(grid_results.best_score_, grid_results.best_params_))
means = grid_results.cv_results_['mean_test_score']
stds = grid_results.cv_results_['std_test_score']
params = grid_results.cv_results_['params']

for mean, stdev, param in zip(means, stds, params):
    print('{0} ({1}) with: {2}'.format(mean, stdev, param))
C:\Users\kcsgo\anaconda3\lib\site-packages\sklearn\model_selection\_split.py:296: FutureWarning: Setting a random_state has no effect since shuffle is False. This will raise an error in 0.24. You should leave random_state to its default (None), or set shuffle=True.
  FutureWarning
[Parallel(n_jobs=1)]: Using backend SequentialBackend with 1 concurrent workers.
Fitting 5 folds for each of 12 candidates, totalling 60 fits
[CV] activation=softmax, initializer=normal ..........................
[CV]  activation=softmax, initializer=normal, score=0.696, total=   1.8s
[CV] activation=softmax, initializer=normal ..........................
[Parallel(n_jobs=1)]: Done   1 out of   1 | elapsed:    1.7s remaining:    0.0s
[CV]  activation=softmax, initializer=normal, score=0.646, total=   1.7s
[CV] activation=softmax, initializer=normal ..........................
[Parallel(n_jobs=1)]: Done   2 out of   2 | elapsed:    3.5s remaining:    0.0s
[CV]  activation=softmax, initializer=normal, score=0.833, total=   1.7s
[CV] activation=softmax, initializer=normal ..........................
[Parallel(n_jobs=1)]: Done   3 out of   3 | elapsed:    5.2s remaining:    0.0s
[CV]  activation=softmax, initializer=normal, score=0.808, total=   1.8s
[CV] activation=softmax, initializer=normal ..........................
[Parallel(n_jobs=1)]: Done   4 out of   4 | elapsed:    7.0s remaining:    0.0s
[CV]  activation=softmax, initializer=normal, score=0.795, total=   1.8s
[CV] activation=softmax, initializer=uniform .........................
[Parallel(n_jobs=1)]: Done   5 out of   5 | elapsed:    8.8s remaining:    0.0s
[CV]  activation=softmax, initializer=uniform, score=0.709, total=   1.7s
[CV] activation=softmax, initializer=uniform .........................
[Parallel(n_jobs=1)]: Done   6 out of   6 | elapsed:   10.5s remaining:    0.0s
[CV]  activation=softmax, initializer=uniform, score=0.608, total=   1.7s
[CV] activation=softmax, initializer=uniform .........................
[Parallel(n_jobs=1)]: Done   7 out of   7 | elapsed:   12.3s remaining:    0.0s
[CV]  activation=softmax, initializer=uniform, score=0.821, total=   1.7s
[CV] activation=softmax, initializer=uniform .........................
[Parallel(n_jobs=1)]: Done   8 out of   8 | elapsed:   14.0s remaining:    0.0s
[CV]  activation=softmax, initializer=uniform, score=0.795, total=   1.8s
[CV] activation=softmax, initializer=uniform .........................
[Parallel(n_jobs=1)]: Done   9 out of   9 | elapsed:   15.8s remaining:    0.0s
[CV]  activation=softmax, initializer=uniform, score=0.769, total=   1.7s
[CV] activation=softmax, initializer=zero ............................
[CV]  activation=softmax, initializer=zero, score=0.646, total=   1.7s
[CV] activation=softmax, initializer=zero ............................
[CV]  activation=softmax, initializer=zero, score=0.570, total=   1.8s
[CV] activation=softmax, initializer=zero ............................
[CV]  activation=softmax, initializer=zero, score=0.705, total=   1.8s
[CV] activation=softmax, initializer=zero ............................
[CV]  activation=softmax, initializer=zero, score=0.744, total=   1.7s
[CV] activation=softmax, initializer=zero ............................
[CV]  activation=softmax, initializer=zero, score=0.679, total=   1.7s
[CV] activation=relu, initializer=normal .............................
[CV] . activation=relu, initializer=normal, score=0.709, total=   1.8s
[CV] activation=relu, initializer=normal .............................
[CV] . activation=relu, initializer=normal, score=0.671, total=   1.7s
[CV] activation=relu, initializer=normal .............................
[CV] . activation=relu, initializer=normal, score=0.808, total=   2.2s
[CV] activation=relu, initializer=normal .............................
[CV] . activation=relu, initializer=normal, score=0.795, total=   1.7s
[CV] activation=relu, initializer=normal .............................
[CV] . activation=relu, initializer=normal, score=0.808, total=   1.7s
[CV] activation=relu, initializer=uniform ............................
[CV]  activation=relu, initializer=uniform, score=0.722, total=   1.7s
[CV] activation=relu, initializer=uniform ............................
[CV]  activation=relu, initializer=uniform, score=0.570, total=   1.7s
[CV] activation=relu, initializer=uniform ............................
[CV]  activation=relu, initializer=uniform, score=0.821, total=   1.7s
[CV] activation=relu, initializer=uniform ............................
[CV]  activation=relu, initializer=uniform, score=0.679, total=   1.8s
[CV] activation=relu, initializer=uniform ............................
[CV]  activation=relu, initializer=uniform, score=0.782, total=   1.7s
[CV] activation=relu, initializer=zero ...............................
[CV] ... activation=relu, initializer=zero, score=0.646, total=   1.7s
[CV] activation=relu, initializer=zero ...............................
[CV] ... activation=relu, initializer=zero, score=0.570, total=   1.7s
[CV] activation=relu, initializer=zero ...............................
[CV] ... activation=relu, initializer=zero, score=0.705, total=   1.7s
[CV] activation=relu, initializer=zero ...............................
[CV] ... activation=relu, initializer=zero, score=0.744, total=   1.7s
[CV] activation=relu, initializer=zero ...............................
[CV] ... activation=relu, initializer=zero, score=0.679, total=   1.7s
[CV] activation=tanh, initializer=normal .............................
[CV] . activation=tanh, initializer=normal, score=0.696, total=   1.7s
[CV] activation=tanh, initializer=normal .............................
[CV] . activation=tanh, initializer=normal, score=0.633, total=   1.7s
[CV] activation=tanh, initializer=normal .............................
[CV] . activation=tanh, initializer=normal, score=0.833, total=   1.7s
[CV] activation=tanh, initializer=normal .............................
[CV] . activation=tanh, initializer=normal, score=0.808, total=   1.7s
[CV] activation=tanh, initializer=normal .............................
[CV] . activation=tanh, initializer=normal, score=0.821, total=   1.7s
[CV] activation=tanh, initializer=uniform ............................
[CV]  activation=tanh, initializer=uniform, score=0.722, total=   1.7s
[CV] activation=tanh, initializer=uniform ............................
[CV]  activation=tanh, initializer=uniform, score=0.595, total=   1.7s
[CV] activation=tanh, initializer=uniform ............................
[CV]  activation=tanh, initializer=uniform, score=0.795, total=   1.7s
[CV] activation=tanh, initializer=uniform ............................
[CV]  activation=tanh, initializer=uniform, score=0.833, total=   1.7s
[CV] activation=tanh, initializer=uniform ............................
[CV]  activation=tanh, initializer=uniform, score=0.769, total=   1.8s
[CV] activation=tanh, initializer=zero ...............................
[CV] ... activation=tanh, initializer=zero, score=0.646, total=   2.2s
[CV] activation=tanh, initializer=zero ...............................
[CV] ... activation=tanh, initializer=zero, score=0.570, total=   1.7s
[CV] activation=tanh, initializer=zero ...............................
[CV] ... activation=tanh, initializer=zero, score=0.705, total=   1.7s
[CV] activation=tanh, initializer=zero ...............................
[CV] ... activation=tanh, initializer=zero, score=0.744, total=   1.7s
[CV] activation=tanh, initializer=zero ...............................
[CV] ... activation=tanh, initializer=zero, score=0.679, total=   1.7s
[CV] activation=linear, initializer=normal ...........................
[CV]  activation=linear, initializer=normal, score=0.823, total=   1.7s
[CV] activation=linear, initializer=normal ...........................
[CV]  activation=linear, initializer=normal, score=0.620, total=   1.7s
[CV] activation=linear, initializer=normal ...........................
[CV]  activation=linear, initializer=normal, score=0.833, total=   1.7s
[CV] activation=linear, initializer=normal ...........................
[CV]  activation=linear, initializer=normal, score=0.846, total=   1.7s
[CV] activation=linear, initializer=normal ...........................
[CV]  activation=linear, initializer=normal, score=0.769, total=   1.7s
[CV] activation=linear, initializer=uniform ..........................
[CV]  activation=linear, initializer=uniform, score=0.823, total=   1.7s
[CV] activation=linear, initializer=uniform ..........................
[CV]  activation=linear, initializer=uniform, score=0.608, total=   1.7s
[CV] activation=linear, initializer=uniform ..........................
[CV]  activation=linear, initializer=uniform, score=0.821, total=   1.7s
[CV] activation=linear, initializer=uniform ..........................
[CV]  activation=linear, initializer=uniform, score=0.846, total=   1.7s
[CV] activation=linear, initializer=uniform ..........................
[CV]  activation=linear, initializer=uniform, score=0.808, total=   1.7s
[CV] activation=linear, initializer=zero .............................
[CV] . activation=linear, initializer=zero, score=0.646, total=   1.7s
[CV] activation=linear, initializer=zero .............................
[CV] . activation=linear, initializer=zero, score=0.570, total=   1.7s
[CV] activation=linear, initializer=zero .............................
[CV] . activation=linear, initializer=zero, score=0.705, total=   1.7s
[CV] activation=linear, initializer=zero .............................
[CV] . activation=linear, initializer=zero, score=0.744, total=   1.7s
[CV] activation=linear, initializer=zero .............................
[CV] . activation=linear, initializer=zero, score=0.679, total=   1.7s
[Parallel(n_jobs=1)]: Done  60 out of  60 | elapsed:  1.7min finished
Best: 0.7809477329254151, using {'activation': 'linear', 'initializer': 'uniform'} 
0.7555339097976684 (0.07201815414751926) with: {'activation': 'softmax', 'initializer': 'normal'}
0.740214216709137 (0.07595814846646004) with: {'activation': 'softmax', 'initializer': 'uniform'}
0.6686789989471436 (0.05899773033245815) with: {'activation': 'softmax', 'initializer': 'zero'}
0.758000648021698 (0.05709934235989969) with: {'activation': 'relu', 'initializer': 'normal'}
0.714638102054596 (0.08725491912068364) with: {'activation': 'relu', 'initializer': 'uniform'}
0.6686789989471436 (0.05899773033245815) with: {'activation': 'relu', 'initializer': 'zero'}
0.7581304669380188 (0.0793955810899181) with: {'activation': 'tanh', 'initializer': 'normal'}
0.7427783250808716 (0.08236839951084776) with: {'activation': 'tanh', 'initializer': 'uniform'}
0.6686789989471436 (0.05899773033245815) with: {'activation': 'tanh', 'initializer': 'zero'}
0.7783511757850647 (0.08327394888280217) with: {'activation': 'linear', 'initializer': 'normal'}
0.7809477329254151 (0.0875603191939732) with: {'activation': 'linear', 'initializer': 'uniform'}
0.6686789989471436 (0.05899773033245815) with: {'activation': 'linear', 'initializer': 'zero'}
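Note that the 'zero' initializer produces the same flat score (about 0.669) for every activation: with all weights starting at zero the hidden layers carry no useful signal, so the network collapses to predicting the majority class, which accounts for 262 of the 392 samples (about 0.67).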

Number of Neurons

def create_model(neuron1, neuron2):
    # Create model
    model = Sequential()
    model.add(Dense(neuron1, input_shape=(8, ), kernel_initializer='uniform', activation='linear'))
    model.add(Dense(neuron2, kernel_initializer='uniform', activation='linear'))
    model.add(Dense(1, activation='sigmoid'))
    
    # Compile model
    model.compile(optimizer=Adam(lr=0.1), loss='binary_crossentropy', metrics=['accuracy'])
    return model

# Create model with KerasClassifier
model = KerasClassifier(build_fn=create_model, epochs=50, batch_size=20, verbose=False)

# Define Grid Search parameters
neuron1 = [4, 8, 16]
neuron2 = [2, 4, 8]

# Make a dictionary of the grid search parameters
param_grid = {
    'neuron1':neuron1,
    'neuron2':neuron2
}

# Build and fit the GridSearchCV
grid = GridSearchCV(estimator=model, param_grid=param_grid, cv=KFold(random_state=seed), refit=True,
                    verbose=10)
grid_results = grid.fit(X_standard, y)

# Summarize the results
print('Best: {0}, using {1} '.format(grid_results.best_score_, grid_results.best_params_))
means = grid_results.cv_results_['mean_test_score']
stds = grid_results.cv_results_['std_test_score']
params = grid_results.cv_results_['params']

for mean, stdev, param in zip(means, stds, params):
    print('{0} ({1}) with: {2}'.format(mean, stdev, param))
Fitting 5 folds for each of 9 candidates, totalling 45 fits
[CV] neuron1=4, neuron2=2 ............................................
[Parallel(n_jobs=1)]: Using backend SequentialBackend with 1 concurrent workers.
[CV] ................ neuron1=4, neuron2=2, score=0.835, total=   1.7s
[CV] neuron1=4, neuron2=2 ............................................
[Parallel(n_jobs=1)]: Done   1 out of   1 | elapsed:    1.6s remaining:    0.0s
[CV] ................ neuron1=4, neuron2=2, score=0.646, total=   1.7s
[CV] neuron1=4, neuron2=2 ............................................
[Parallel(n_jobs=1)]: Done   2 out of   2 | elapsed:    3.3s remaining:    0.0s
[CV] ................ neuron1=4, neuron2=2, score=0.821, total=   1.7s
[CV] neuron1=4, neuron2=2 ............................................
[Parallel(n_jobs=1)]: Done   3 out of   3 | elapsed:    5.0s remaining:    0.0s
[CV] ................ neuron1=4, neuron2=2, score=0.846, total=   1.7s
[CV] neuron1=4, neuron2=2 ............................................
[Parallel(n_jobs=1)]: Done   4 out of   4 | elapsed:    6.7s remaining:    0.0s
[CV] ................ neuron1=4, neuron2=2, score=0.808, total=   1.7s
[CV] neuron1=4, neuron2=4 ............................................
[Parallel(n_jobs=1)]: Done   5 out of   5 | elapsed:    8.3s remaining:    0.0s
[CV] ................ neuron1=4, neuron2=4, score=0.835, total=   1.7s
[CV] neuron1=4, neuron2=4 ............................................
[Parallel(n_jobs=1)]: Done   6 out of   6 | elapsed:   10.0s remaining:    0.0s
[CV] ................ neuron1=4, neuron2=4, score=0.633, total=   1.7s
[CV] neuron1=4, neuron2=4 ............................................
[Parallel(n_jobs=1)]: Done   7 out of   7 | elapsed:   11.7s remaining:    0.0s
[CV] ................ neuron1=4, neuron2=4, score=0.821, total=   1.6s
[CV] neuron1=4, neuron2=4 ............................................
[Parallel(n_jobs=1)]: Done   8 out of   8 | elapsed:   13.3s remaining:    0.0s
[CV] ................ neuron1=4, neuron2=4, score=0.833, total=   1.6s
[CV] neuron1=4, neuron2=4 ............................................
[Parallel(n_jobs=1)]: Done   9 out of   9 | elapsed:   15.0s remaining:    0.0s
[CV] ................ neuron1=4, neuron2=4, score=0.795, total=   1.6s
[CV] neuron1=4, neuron2=8 ............................................
[CV] ................ neuron1=4, neuron2=8, score=0.823, total=   1.7s
[CV] neuron1=4, neuron2=8 ............................................
[CV] ................ neuron1=4, neuron2=8, score=0.608, total=   1.6s
[CV] neuron1=4, neuron2=8 ............................................
[CV] ................ neuron1=4, neuron2=8, score=0.808, total=   2.2s
[CV] neuron1=4, neuron2=8 ............................................
[CV] ................ neuron1=4, neuron2=8, score=0.846, total=   1.6s
[CV] neuron1=4, neuron2=8 ............................................
[CV] ................ neuron1=4, neuron2=8, score=0.756, total=   1.6s
[CV] neuron1=8, neuron2=2 ............................................
[CV] ................ neuron1=8, neuron2=2, score=0.810, total=   1.7s
[CV] neuron1=8, neuron2=2 ............................................
[CV] ................ neuron1=8, neuron2=2, score=0.671, total=   1.7s
[CV] neuron1=8, neuron2=2 ............................................
[CV] ................ neuron1=8, neuron2=2, score=0.808, total=   1.7s
[CV] neuron1=8, neuron2=2 ............................................
[CV] ................ neuron1=8, neuron2=2, score=0.859, total=   1.7s
[CV] neuron1=8, neuron2=2 ............................................
[CV] ................ neuron1=8, neuron2=2, score=0.808, total=   1.7s
[CV] neuron1=8, neuron2=4 ............................................
[CV] ................ neuron1=8, neuron2=4, score=0.810, total=   1.7s
[CV] neuron1=8, neuron2=4 ............................................
[CV] ................ neuron1=8, neuron2=4, score=0.608, total=   1.6s
[CV] neuron1=8, neuron2=4 ............................................
[CV] ................ neuron1=8, neuron2=4, score=0.833, total=   1.7s
[CV] neuron1=8, neuron2=4 ............................................
[CV] ................ neuron1=8, neuron2=4, score=0.808, total=   1.7s
[CV] neuron1=8, neuron2=4 ............................................
[CV] ................ neuron1=8, neuron2=4, score=0.808, total=   1.7s
[CV] neuron1=8, neuron2=8 ............................................
[CV] ................ neuron1=8, neuron2=8, score=0.823, total=   1.7s
[CV] neuron1=8, neuron2=8 ............................................
[CV] ................ neuron1=8, neuron2=8, score=0.620, total=   1.7s
[CV] neuron1=8, neuron2=8 ............................................
[CV] ................ neuron1=8, neuron2=8, score=0.846, total=   1.7s
[CV] neuron1=8, neuron2=8 ............................................
[CV] ................ neuron1=8, neuron2=8, score=0.833, total=   1.6s
[CV] neuron1=8, neuron2=8 ............................................
[CV] ................ neuron1=8, neuron2=8, score=0.808, total=   1.7s
[CV] neuron1=16, neuron2=2 ...........................................
[CV] ............... neuron1=16, neuron2=2, score=0.835, total=   1.6s
[CV] neuron1=16, neuron2=2 ...........................................
[CV] ............... neuron1=16, neuron2=2, score=0.608, total=   1.7s
[CV] neuron1=16, neuron2=2 ...........................................
[CV] ............... neuron1=16, neuron2=2, score=0.821, total=   1.6s
[CV] neuron1=16, neuron2=2 ...........................................
[CV] ............... neuron1=16, neuron2=2, score=0.679, total=   1.6s
[CV] neuron1=16, neuron2=2 ...........................................
[CV] ............... neuron1=16, neuron2=2, score=0.808, total=   1.6s
[CV] neuron1=16, neuron2=4 ...........................................
[CV] ............... neuron1=16, neuron2=4, score=0.810, total=   1.7s
[CV] neuron1=16, neuron2=4 ...........................................
[CV] ............... neuron1=16, neuron2=4, score=0.633, total=   1.7s
[CV] neuron1=16, neuron2=4 ...........................................
[CV] ............... neuron1=16, neuron2=4, score=0.821, total=   1.6s
[CV] neuron1=16, neuron2=4 ...........................................
[CV] ............... neuron1=16, neuron2=4, score=0.821, total=   1.6s
[CV] neuron1=16, neuron2=4 ...........................................
[CV] ............... neuron1=16, neuron2=4, score=0.833, total=   1.6s
[CV] neuron1=16, neuron2=8 ...........................................
[CV] ............... neuron1=16, neuron2=8, score=0.772, total=   2.2s
[CV] neuron1=16, neuron2=8 ...........................................
[CV] ............... neuron1=16, neuron2=8, score=0.608, total=   1.6s
[CV] neuron1=16, neuron2=8 ...........................................
[CV] ............... neuron1=16, neuron2=8, score=0.821, total=   1.6s
[CV] neuron1=16, neuron2=8 ...........................................
[CV] ............... neuron1=16, neuron2=8, score=0.808, total=   1.7s
[CV] neuron1=16, neuron2=8 ...........................................
[CV] ............... neuron1=16, neuron2=8, score=0.808, total=   1.6s
[Parallel(n_jobs=1)]: Done  45 out of  45 | elapsed:  1.3min finished
Best: 0.7910743236541748, using {'neuron1': 4, 'neuron2': 2} 
0.7910743236541748 (0.07391445944714388) with: {'neuron1': 4, 'neuron2': 2}
0.7834144711494446 (0.07662584260652404) with: {'neuron1': 4, 'neuron2': 4}
0.7681272149085998 (0.08549726378903176) with: {'neuron1': 4, 'neuron2': 8}
0.7910743236541748 (0.06319956338108261) with: {'neuron1': 8, 'neuron2': 2}
0.7732878804206849 (0.08340747393344203) with: {'neuron1': 8, 'neuron2': 4}
0.7860434770584106 (0.08385059892312692) with: {'neuron1': 8, 'neuron2': 8}
0.7501460433006286 (0.09039041145364847) with: {'neuron1': 16, 'neuron2': 2}
0.7834793925285339 (0.07564279683053157) with: {'neuron1': 16, 'neuron2': 4}
0.7631288409233093 (0.07941907087664095) with: {'neuron1': 16, 'neuron2': 8}
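Because refit=True, the grid object now holds a final model retrained on the full dataset with the best setting found here (neuron1=4, neuron2=2); that refitted model is what the next section uses for prediction.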

Predict with Optimal Hyperparameters

y_pred = grid.predict(X_standard)
y_pred.shape
(392, 1)
y_pred[:5]
array([[0],
       [1],
       [0],
       [1],
       [1]])
from sklearn.metrics import classification_report, accuracy_score

print(accuracy_score(y, y_pred))
print(classification_report(y, y_pred))
0.7857142857142857
              precision    recall  f1-score   support

           0       0.84      0.84      0.84       262
           1       0.67      0.68      0.68       130

    accuracy                           0.79       392
   macro avg       0.76      0.76      0.76       392
weighted avg       0.79      0.79      0.79       392
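Keep in mind that this accuracy is measured on the same data the grid search was fit on, so it is optimistic. A rough sketch of a held-out evaluation, reusing the model and param_grid from the neuron search above (variable names are illustrative), could look like this:

# Hypothetical sketch: hold out a test set before tuning, then score on it
from sklearn.model_selection import train_test_split

X_train, X_test, y_train, y_test = train_test_split(
    X_standard, y, test_size=0.2, stratify=y, random_state=seed)

grid_holdout = GridSearchCV(estimator=model, param_grid=param_grid,
                            cv=KFold(shuffle=True, random_state=seed), refit=True)
grid_holdout.fit(X_train, y_train)
print(accuracy_score(y_test, grid_holdout.predict(X_test)))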

example = df.iloc[0]
example
Pregnancies                  1.000
Glucose                     89.000
BloodPressure               66.000
SkinThickness               23.000
Insulin                     94.000
BMI                         28.100
DiabetesPedigreeFunction     0.167
Age                         21.000
Outcome                      0.000
Name: 3, dtype: float64
prediction = grid.predict(X_standard[0].reshape(1, -1))
prediction
array([[0]])
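To score a brand-new, unscaled record, apply the fitted scaler before predicting; the values below are made up purely for illustration:

# Hypothetical new record (Pregnancies, Glucose, BloodPressure, SkinThickness,
# Insulin, BMI, DiabetesPedigreeFunction, Age) - illustrative values only
new_sample = np.array([[2, 120, 70, 25, 100, 30.0, 0.4, 35]])
new_sample_std = scaler.transform(new_sample)
print(grid.predict(new_sample_std))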