import numpy
import pandas as pd
from sklearn import preprocessing
numpy.random.seed(10)
all_df = pd.read_excel("/Users/hannah/titanic3.xls")
cols=['survived','name','pclass','sex','age','sibsp','parch','fare','embarked']
all_df=all_df[cols]
msk = numpy.random.rand(len(all_df)) < 0.8
train_df = all_df[msk]
test_df = all_df[~msk]
print('total:',len(all_df),'train:',len(train_df),'test:',len(test_df))
output:
total: 1309 train: 1034 test: 275
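PreprocessData is called next but not defined in this listing. The following is a minimal sketch of what it is assumed to do with the selected columns: drop name, fill missing age and fare with the column means, encode sex as 0/1, one-hot encode embarked, and scale the features to [0, 1] with sklearn's MinMaxScaler (giving the 9 input features the model expects).

def PreprocessData(raw_df):
    # drop the passenger name, which is not used as a feature
    df = raw_df.drop(['name'], axis=1)
    # fill missing age and fare values with the column means
    df['age'] = df['age'].fillna(df['age'].mean())
    df['fare'] = df['fare'].fillna(df['fare'].mean())
    # encode sex as 0 (female) / 1 (male)
    df['sex'] = df['sex'].map({'female': 0, 'male': 1}).astype(int)
    # one-hot encode the port of embarkation
    x_OneHot_df = pd.get_dummies(data=df, columns=['embarked'])
    # first column is the label (survived), the rest are features
    ndarray = x_OneHot_df.values
    Label = ndarray[:, 0]
    Features = ndarray[:, 1:]
    # scale all features to the range [0, 1]
    minmax_scale = preprocessing.MinMaxScaler(feature_range=(0, 1))
    scaledFeatures = minmax_scale.fit_transform(Features)
    return scaledFeatures, Label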
train_Features,train_Label=PreprocessData(train_df)
test_Features,test_Label=PreprocessData(test_df)
from keras.models import Sequential
from keras.layers import Dense,Dropout
output:
Using TensorFlow backend.
model=Sequential()
model.add(Dense(units=40,input_dim=9,kernel_initializer='uniform',activation='relu'))
model.add(Dense(units=30,kernel_initializer='uniform',activation='relu'))
model.add(Dense(units=1,kernel_initializer='uniform',activation='sigmoid'))
model.compile(loss='binary_crossentropy',optimizer='adam',metrics=['accuracy'])
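Before training, the layer shapes and parameter counts can be checked (output omitted here):
model.summary()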
train_history=model.fit(x=train_Features,y=train_Label,validation_split=0.1,epochs=30,batch_size=30,verbose=2)
output:
Train on 930 samples, validate on 104 samples
Epoch 1/30
- 0s - loss: 0.6899 - accuracy: 0.5774 - val_loss: 0.6706 - val_accuracy: 0.7885
Epoch 2/30
- 0s - loss: 0.6678 - accuracy: 0.5968 - val_loss: 0.5918 - val_accuracy: 0.7885
Epoch 3/30
- 0s - loss: 0.6103 - accuracy: 0.6667 - val_loss: 0.4945 - val_accuracy: 0.8173
Epoch 4/30
- 0s - loss: 0.5461 - accuracy: 0.7624 - val_loss: 0.4624 - val_accuracy: 0.7596
Epoch 5/30
- 0s - loss: 0.5065 - accuracy: 0.7602 - val_loss: 0.4538 - val_accuracy: 0.7885
Epoch 6/30
- 0s - loss: 0.4908 - accuracy: 0.7645 - val_loss: 0.4398 - val_accuracy: 0.7788
Epoch 7/30
- 0s - loss: 0.4820 - accuracy: 0.7591 - val_loss: 0.4423 - val_accuracy: 0.7885
Epoch 8/30
- 0s - loss: 0.4786 - accuracy: 0.7581 - val_loss: 0.4348 - val_accuracy: 0.7981
Epoch 9/30
- 0s - loss: 0.4728 - accuracy: 0.7624 - val_loss: 0.4274 - val_accuracy: 0.7885
Epoch 10/30
- 0s - loss: 0.4695 - accuracy: 0.7796 - val_loss: 0.4253 - val_accuracy: 0.8077
Epoch 11/30
- 0s - loss: 0.4655 - accuracy: 0.7634 - val_loss: 0.4225 - val_accuracy: 0.8173
Epoch 12/30
- 0s - loss: 0.4638 - accuracy: 0.7710 - val_loss: 0.4188 - val_accuracy: 0.8365
Epoch 13/30
- 0s - loss: 0.4621 - accuracy: 0.7796 - val_loss: 0.4191 - val_accuracy: 0.8365
Epoch 14/30
- 0s - loss: 0.4606 - accuracy: 0.7720 - val_loss: 0.4191 - val_accuracy: 0.8365
Epoch 15/30
- 0s - loss: 0.4626 - accuracy: 0.7849 - val_loss: 0.4201 - val_accuracy: 0.8269
Epoch 16/30
- 0s - loss: 0.4563 - accuracy: 0.7817 - val_loss: 0.4167 - val_accuracy: 0.8365
Epoch 17/30
- 0s - loss: 0.4549 - accuracy: 0.7849 - val_loss: 0.4169 - val_accuracy: 0.8365
Epoch 18/30
- 0s - loss: 0.4539 - accuracy: 0.7871 - val_loss: 0.4124 - val_accuracy: 0.7981
Epoch 19/30
- 0s - loss: 0.4537 - accuracy: 0.7957 - val_loss: 0.4206 - val_accuracy: 0.8173
Epoch 20/30
- 0s - loss: 0.4508 - accuracy: 0.7882 - val_loss: 0.4128 - val_accuracy: 0.8173
Epoch 21/30
- 0s - loss: 0.4506 - accuracy: 0.7946 - val_loss: 0.4133 - val_accuracy: 0.8077
Epoch 22/30
- 0s - loss: 0.4493 - accuracy: 0.7957 - val_loss: 0.4139 - val_accuracy: 0.8269
Epoch 23/30
- 0s - loss: 0.4521 - accuracy: 0.7871 - val_loss: 0.4155 - val_accuracy: 0.8269
Epoch 24/30
- 0s - loss: 0.4486 - accuracy: 0.7871 - val_loss: 0.4165 - val_accuracy: 0.8269
Epoch 25/30
- 0s - loss: 0.4488 - accuracy: 0.7903 - val_loss: 0.4197 - val_accuracy: 0.8269
Epoch 26/30
- 0s - loss: 0.4500 - accuracy: 0.7968 - val_loss: 0.4187 - val_accuracy: 0.8269
Epoch 27/30
- 0s - loss: 0.4457 - accuracy: 0.8022 - val_loss: 0.4150 - val_accuracy: 0.8077
Epoch 28/30
- 0s - loss: 0.4496 - accuracy: 0.7882 - val_loss: 0.4141 - val_accuracy: 0.8077
Epoch 29/30
- 0s - loss: 0.4455 - accuracy: 0.7957 - val_loss: 0.4179 - val_accuracy: 0.8173
Epoch 30/30
- 0s - loss: 0.4523 - accuracy: 0.7903 - val_loss: 0.4172 - val_accuracy: 0.8173
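After training, the held-out test set can be used to check generalization. A minimal follow-up, assuming the test_Features and test_Label produced above:
scores = model.evaluate(x=test_Features, y=test_Label)
print('test accuracy:', scores[1])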