import pandas as pd
import numpy as np
from keras.models import Sequential
from keras.layers import Dense, Activation, Embedding, Merge, Flatten
from sklearn.model_selection import train_test_split
from sklearn.preprocessing import StandardScaler
import matplotlib.pyplot as plt
%matplotlib inline
Using TensorFlow backend.
df = pd.read_csv('data/cmc.data', header=None,
                 names=['Age', 'Education', 'H_education', 'num_child',
                        'Religion', 'Employ', 'H_occupation',
                        'living_standard', 'Media_exposure', 'contraceptive'])
df.head()
| | Age | Education | H_education | num_child | Religion | Employ | H_occupation | living_standard | Media_exposure | contraceptive |
|---|---|---|---|---|---|---|---|---|---|---|
| 0 | 24 | 2 | 3 | 3 | 1 | 1 | 2 | 3 | 0 | 1 |
| 1 | 45 | 1 | 3 | 10 | 1 | 1 | 3 | 4 | 0 | 1 |
| 2 | 43 | 2 | 3 | 7 | 1 | 1 | 3 | 4 | 0 | 1 |
| 3 | 42 | 3 | 2 | 9 | 1 | 1 | 3 | 3 | 0 | 1 |
| 4 | 36 | 3 | 3 | 8 | 1 | 1 | 3 | 2 | 0 | 1 |
df.isnull().any()
Age                False
Education          False
H_education        False
num_child          False
Religion           False
Employ             False
H_occupation       False
living_standard    False
Media_exposure     False
contraceptive      False
dtype: bool
df.Education.hist()
<matplotlib.axes._subplots.AxesSubplot at 0x12287b630>
df.shape
(1473, 10)
df.contraceptive.hist()
<matplotlib.axes._subplots.AxesSubplot at 0x11a97b6a0>
df.dtypes
Age                int64
Education          int64
H_education        int64
num_child          int64
Religion           int64
Employ             int64
H_occupation       int64
living_standard    int64
Media_exposure     int64
contraceptive      int64
dtype: object
def one_hot_encoding(idx):
    """One-hot encode a vector of integer labels; labels must start at 0."""
    y = np.zeros((len(idx), max(idx) + 1))
    y[np.arange(len(idx)), idx] = 1   # set a 1 in the column named by each label
    return y
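To see what the helper produces, here is a quick check on a toy label vector (plain numpy, nothing assumed beyond the function above):
one_hot_encoding(np.array([0, 2, 1]))
# array([[ 1.,  0.,  0.],
#        [ 0.,  0.,  1.],
#        [ 0.,  1.,  0.]])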
scaler = StandardScaler()
df[['Age','num_child']] = scaler.fit_transform(df[['Age','num_child']])
x = df[['Age','num_child','Employ','Media_exposure']].values
y = one_hot_encoding(df.contraceptive.values-1)
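One caveat: the scaler above is fit on the full dataframe, so test-set statistics leak into the transform. A leak-free sketch (splitting before scaling; variable names here are illustrative) would look like:
raw = df[['Age', 'num_child']].values
train_raw, test_raw = train_test_split(raw, test_size=0.1, random_state=1)
scaler = StandardScaler().fit(train_raw)    # statistics from the training rows only
train_scaled = scaler.transform(train_raw)
test_scaled = scaler.transform(test_raw)    # test rows reuse the training statistics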
liv_cats = df.living_standard.max()
edu_cats = df.Education.max()
liv = df.living_standard.values - 1
liv_one_hot = one_hot_encoding(liv)
edu = df.Education.values - 1
edu_one_hot = one_hot_encoding(edu)
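Keras ships an equivalent utility, so the hand-rolled encoder is optional; assuming Keras 2's keras.utils.to_categorical, the same arrays come out of:
from keras.utils import to_categorical
liv_alt = to_categorical(liv, num_classes=liv_cats)   # same as liv_one_hot above
edu_alt = to_categorical(edu, num_classes=edu_cats)   # same as edu_one_hot above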
train_x, test_x, train_liv, test_liv, train_edu, test_edu, train_y, test_y = \
    train_test_split(x, liv_one_hot, edu_one_hot, y, test_size=0.1, random_state=1)
train_x = np.hstack([train_x, train_edu, train_liv])
test_x = np.hstack([test_x, test_edu, test_liv])
train_x.shape
(1325, 12)
train_edu.shape
(1325, 4)
train_liv.shape
(1325, 4)
train_x.shape
(1325, 12)
model = Sequential()
model.add(Dense(input_dim=train_x.shape[1],output_dim=12))
model.add(Activation('relu'))
model.add(Dense(output_dim=3))
model.add(Activation('softmax'))
model.compile(optimizer='adagrad', loss='categorical_crossentropy', metrics=['accuracy'])
model.fit(train_x, train_y, nb_epoch=100, verbose=2)
/Users/sachin/anaconda/lib/python3.5/site-packages/ipykernel/__main__.py:2: UserWarning: Update your `Dense` call to the Keras 2 API: `Dense(units=12, input_dim=12)`
  from ipykernel import kernelapp as app
/Users/sachin/anaconda/lib/python3.5/site-packages/ipykernel/__main__.py:4: UserWarning: Update your `Dense` call to the Keras 2 API: `Dense(units=3)`
/Users/sachin/anaconda/lib/python3.5/site-packages/keras/models.py:826: UserWarning: The `nb_epoch` argument in `fit` has been renamed `epochs`.
  warnings.warn('The `nb_epoch` argument in `fit` '
Epoch 1/100
0s - loss: 1.1002 - acc: 0.3894
Epoch 2/100
0s - loss: 1.0557 - acc: 0.4158
Epoch 3/100
0s - loss: 1.0371 - acc: 0.4370
... [epochs 4-98 omitted: loss falls steadily from 1.0242 to 0.8967, acc rises from 0.4649 to 0.5706] ...
Epoch 99/100
0s - loss: 0.8964 - acc: 0.5691
Epoch 100/100
0s - loss: 0.8962 - acc: 0.5721
<keras.callbacks.History at 0x1212fee10>
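The same fit call can also track held-out performance per epoch; a minimal variant (assuming the Keras 2 argument names the warnings above recommend):
history = model.fit(train_x, train_y, epochs=100, verbose=2,
                    validation_split=0.1)   # hold out 10% of train_x for val_loss/val_acc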
model.summary()
_________________________________________________________________
Layer (type)                 Output Shape              Param #
=================================================================
dense_1 (Dense)              (None, 12)                156
_________________________________________________________________
activation_1 (Activation)    (None, 12)                0
_________________________________________________________________
dense_2 (Dense)              (None, 3)                 39
_________________________________________________________________
activation_2 (Activation)    (None, 3)                 0
=================================================================
Total params: 195.0
Trainable params: 195.0
Non-trainable params: 0.0
_________________________________________________________________
for w in model.get_weights():
print(w.shape)
(12, 12)
(12,)
(12, 3)
(3,)
model.evaluate(test_x, test_y, batch_size=256)
148/148 [==============================] - 0s
[0.85758495330810547, 0.587837815284729]
model.predict(test_x[:10])
array([[ 0.39239073,  0.2203065 ,  0.38730279],
       [ 0.82135081,  0.10566427,  0.07298491],
       [ 0.25139767,  0.17943899,  0.56916332],
       [ 0.3676849 ,  0.33580911,  0.29650599],
       [ 0.75309271,  0.13132168,  0.11558564],
       [ 0.16729502,  0.54943871,  0.28326628],
       [ 0.18573713,  0.45595431,  0.35830855],
       [ 0.8188768 ,  0.10733887,  0.0737843 ],
       [ 0.73907691,  0.04200678,  0.21891631],
       [ 0.64818466,  0.11329354,  0.2385218 ]], dtype=float32)
liv
array([2, 3, 3, ..., 3, 1, 3])
train_x, test_x, train_liv, test_liv, train_edu, test_edu, train_y, test_y = \
    train_test_split(x, liv, edu, y, test_size=0.1, random_state=1)
# Embedding input for living_standard
encoder_liv = Sequential()
encoder_liv.add(Embedding(liv_cats,4,input_length=1))
encoder_liv.add(Flatten())
# Embedding input for Education
encoder_edu = Sequential()
encoder_edu.add(Embedding(edu_cats,4,input_length=1))
encoder_edu.add(Flatten())
# Dense input for the remaining numeric features (x)
dense_x = Sequential()
dense_x.add(Dense(4, input_dim=x.shape[1]))
model = Sequential()
model.add(Merge([encoder_liv, encoder_edu, dense_x], mode='concat'))
# model.add(Activation('relu'))
model.add(Dense(output_dim=12))
model.add(Activation('relu'))
model.add(Dense(output_dim=3))
model.add(Activation('softmax'))
model.compile(optimizer='adagrad', loss='categorical_crossentropy', metrics=['accuracy'])
/Users/sachin/anaconda/lib/python3.5/site-packages/ipykernel/__main__.py:16: UserWarning: The `Merge` layer is deprecated and will be removed after 08/2017. Use instead layers from `keras.layers.merge`, e.g. `add`, `concatenate`, etc.
/Users/sachin/anaconda/lib/python3.5/site-packages/ipykernel/__main__.py:18: UserWarning: Update your `Dense` call to the Keras 2 API: `Dense(units=12)`
/Users/sachin/anaconda/lib/python3.5/site-packages/ipykernel/__main__.py:20: UserWarning: Update your `Dense` call to the Keras 2 API: `Dense(units=3)`
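As the warning says, `Merge` is deprecated in favor of the functional API. A minimal sketch of the same three-input architecture with `concatenate` (names here are illustrative, assuming Keras 2):
from keras.models import Model
from keras.layers import Input, concatenate

liv_in = Input(shape=(1,))
edu_in = Input(shape=(1,))
num_in = Input(shape=(x.shape[1],))
liv_vec = Flatten()(Embedding(liv_cats, 4, input_length=1)(liv_in))
edu_vec = Flatten()(Embedding(edu_cats, 4, input_length=1)(edu_in))
num_vec = Dense(4)(num_in)
h = Dense(12, activation='relu')(concatenate([liv_vec, edu_vec, num_vec]))
out = Dense(3, activation='softmax')(h)
func_model = Model(inputs=[liv_in, edu_in, num_in], outputs=out)
func_model.compile(optimizer='adagrad', loss='categorical_crossentropy', metrics=['accuracy'])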
model.fit([train_liv[:,None], train_edu[:,None], train_x], train_y, nb_epoch=100, verbose=2)
/Users/sachin/anaconda/lib/python3.5/site-packages/keras/models.py:826: UserWarning: The `nb_epoch` argument in `fit` has been renamed `epochs`. warnings.warn('The `nb_epoch` argument in `fit` '
Epoch 1/100
0s - loss: 1.1477 - acc: 0.3442
Epoch 2/100
0s - loss: 1.0408 - acc: 0.4242
Epoch 3/100
0s - loss: 1.0129 - acc: 0.4679
... [epochs 4-98 omitted: loss falls steadily from 0.9960 to 0.8966, acc rises from 0.4755 to 0.5600] ...
Epoch 99/100
0s - loss: 0.8964 - acc: 0.5608
Epoch 100/100
0s - loss: 0.8964 - acc: 0.5600
<keras.callbacks.History at 0x121d8ddd8>
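Once trained, the learned 4-dimensional category vectors can be read back from each embedding layer with the standard layer.get_weights() API:
liv_vectors = encoder_liv.layers[0].get_weights()[0]   # shape (liv_cats, 4), one row per living_standard level
edu_vectors = encoder_edu.layers[0].get_weights()[0]   # shape (edu_cats, 4), one row per Education level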
dense_x.summary()
_________________________________________________________________
Layer (type)                 Output Shape              Param #
=================================================================
dense_3 (Dense)              (None, 4)                 20
=================================================================
Total params: 20.0
Trainable params: 20
Non-trainable params: 0.0
_________________________________________________________________
encoder_liv.summary()
_________________________________________________________________
Layer (type)                 Output Shape              Param #
=================================================================
embedding_1 (Embedding)      (None, 1, 4)              16
_________________________________________________________________
flatten_1 (Flatten)          (None, 4)                 0
=================================================================
Total params: 16.0
Trainable params: 16.0
Non-trainable params: 0.0
_________________________________________________________________
model.summary()
_________________________________________________________________
Layer (type)                 Output Shape              Param #
=================================================================
merge_1 (Merge)              (None, 12)                0
_________________________________________________________________
dense_4 (Dense)              (None, 12)                156
_________________________________________________________________
activation_3 (Activation)    (None, 12)                0
_________________________________________________________________
dense_5 (Dense)              (None, 3)                 39
_________________________________________________________________
activation_4 (Activation)    (None, 3)                 0
=================================================================
Total params: 247.0
Trainable params: 247.0
Non-trainable params: 0.0
_________________________________________________________________
for w in model.get_weights():
print(w.shape)
---------------------------------------------------------------------------
AttributeError                            Traceback (most recent call last)
<ipython-input-23-8e2f9d3764a7> in <module>()
      1 for w in model.get_weights():
----> 2     print(w.shape)

AttributeError: 'list' object has no attribute 'shape'
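The error occurs because get_weights() on a model assembled from Merge'd Sequential sub-models returns a nested list, one entry per branch (the dump below confirms this). A small recursive walker handles either shape:
def print_weight_shapes(weights, indent=0):
    # Arrays print their shape; nested lists (one per sub-model) recurse.
    for w in weights:
        if isinstance(w, list):
            print_weight_shapes(w, indent + 2)
        else:
            print(' ' * indent, w.shape)

print_weight_shapes(model.get_weights())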
w
[array([[-0.10935115, -0.23031998,  0.23564951,  0.3424432 ],
        [-0.06168354, -0.05825301,  0.111118  ,  0.16586818],
        [-0.15200721, -0.0934339 , -0.10459773,  0.00161025],
        [-0.04321436,  0.05898349, -0.01321368, -0.08363315]], dtype=float32)]
a = model.get_weights()
a
[[array([[-0.10935115, -0.23031998,  0.23564951,  0.3424432 ],
         [-0.06168354, -0.05825301,  0.111118  ,  0.16586818],
         [-0.15200721, -0.0934339 , -0.10459773,  0.00161025],
         [-0.04321436,  0.05898349, -0.01321368, -0.08363315]], dtype=float32)],
 [],
 [array([[ 0.3322866 ,  0.19930699, -0.24251796,  0.24181931],
         [ 0.19793722,  0.12741166, -0.18478565,  0.16072831],
         [ 0.00148587, -0.0822821 , -0.00326628,  0.04515119],
         [-0.09646862, -0.2633568 ,  0.16113301, -0.19863592]], dtype=float32)],
 [],
 [array([[-0.59816033,  0.87062579, -0.2976734 ,  0.08506605],
         [ 0.28670722,  0.01459732, -0.67885333,  0.91728342],
         [-0.72324103, -0.32768196,  0.8207261 ,  0.69913387],
         [-0.20462862,  0.6349501 , -0.34206402,  0.06973144]], dtype=float32),
  array([ 0.08511487, -0.05479484,  0.09653779,  0.01787456], dtype=float32)],
 [array([[ 0.54720604,  0.20560427,  0.38046002,  0.06325047, -0.53751922,
          -0.06238003, -0.07022222,  0.33957773,  0.19087823, -0.3943564 ,
           0.07500011,  0.28335726],
         [ 0.66748869,  0.43769655,  0.27608281, -0.03321984, -0.43980169,
          -0.39402112,  0.41173798,  0.4726226 , -0.10022564, -0.39347824,
          -0.65959972, -0.18946621],
         [-0.23607142,  0.28477719, -0.28498721,  0.41613233,  0.15728261,
          -0.17061651, -0.1701867 , -0.03094537, -0.45825756,  0.35427347,
          -0.20373616,  0.4309096 ],
         [-0.68063968,  0.42256722,  0.08898511,  0.44825551,  0.745767  ,
          -0.15242647, -0.27827993, -0.52321166, -0.14772928,  0.31725135,
           0.41540977,  0.20260219],
         [ 0.11326507,  0.29595992, -0.66411144,  0.03979665,  0.78415918,
          -0.12158706, -0.40152088, -0.05005921, -0.10276611,  0.52777141,
           0.22086678,  0.61187786],
         [-0.81348175, -0.54370284, -0.06375849,  0.55344445, -0.18702853,
          -0.17707211, -0.28065667, -0.51284409, -0.58049625,  0.61143988,
          -0.22997195,  0.21264738],
         [ 0.52765691,  0.48952475,  0.47147927, -0.46761283, -0.35750964,
           0.13695255,  0.52752954,  0.71626884,  0.65563965, -0.7301867 ,
          -0.139073  ,  0.04272588],
         [-0.51113981, -0.36798111, -0.55281842, -0.30524713,  0.39200947,
           0.44922075, -0.4600637 , -0.09046047, -0.42538816,  0.43586993,
          -0.00330608,  0.40157044],
         [ 0.07566521,  0.77746761, -0.05694147, -0.00362857, -0.10556948,
           0.01500559, -0.02078186, -0.28349164, -0.32414779,  0.51029384,
          -0.64134562, -0.24186224],
         [ 0.08324727,  0.33779496,  0.23709755, -0.3381888 ,  0.16273189,
           0.47784749,  0.04162871,  0.30976474, -0.31543013, -0.28823802,
           0.62342113,  0.42441621],
         [-0.28952643,  0.35541049, -0.15565668,  0.2239645 ,  0.46108943,
           0.58152092,  0.39327046,  0.1154022 ,  0.09159836,  0.26722667,
          -0.09292367,  0.15957126],
         [-0.01987584, -0.06251051,  0.0218032 , -0.24661671, -0.08240297,
          -0.84416002,  0.33008894, -0.28735343,  0.23902059,  0.50154585,
           0.0804465 ,  0.17921968]], dtype=float32),
  array([ -1.18326025e-04,   1.35194603e-02,  -4.79373112e-02,
          -2.96827797e-02,   1.12170853e-01,   5.46757542e-02,
           4.18406017e-02,  -1.61119565e-01,   3.99559699e-02,
           1.43326208e-01,   5.45555726e-03,  -3.65096107e-02], dtype=float32)],
 [],
 [array([[-0.60570943,  0.19913501,  0.43104902],
         [-0.37961754, -0.03606199,  0.33234817],
         [-0.05652641,  0.38592601,  0.12417495],
         [ 0.49130049, -0.45519802, -0.04951563],
         [ 0.11304783, -0.82271665,  0.11088244],
         [ 0.85395575, -0.00952558, -0.67157847],
         [-0.13419652,  0.45367789,  0.18071729],
         [-0.14250514, -0.29016131, -0.04574362],
         [-0.43520772,  0.43990734,  0.22246923],
         [-0.57091314, -0.7859264 , -0.24046065],
         [ 0.02203327, -0.20768185, -0.85033274],
         [ 0.2845436 , -0.80705798, -0.51982474]], dtype=float32),
  array([-0.08315417, -0.05141847,  0.17288126], dtype=float32)],
 []]
model.evaluate([test_liv[:,None], test_edu[:,None], test_x],test_y, batch_size=256)
148/148 [==============================] - 0s
[0.86288201808929443, 0.60810810327529907]
p = model.predict([test_liv[:,None], test_edu[:,None], test_x], batch_size=256)
p[:5]
array([[ 0.29773653,  0.26741281,  0.43485063],
       [ 0.77942479,  0.11813645,  0.10243875],
       [ 0.18719749,  0.21930861,  0.59349388],
       [ 0.37331343,  0.36969444,  0.25699213],
       [ 0.69447452,  0.15733764,  0.14818783]], dtype=float32)
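Turning these probabilities into hard class labels and a manual accuracy check is plain numpy:
pred_classes = p.argmax(axis=1)                 # most probable class per row
true_classes = test_y.argmax(axis=1)            # undo the one-hot encoding
print((pred_classes == true_classes).mean())    # should match the evaluate() accuracy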
model.summary()
_________________________________________________________________
Layer (type)                 Output Shape              Param #
=================================================================
merge_1 (Merge)              (None, 12)                0
_________________________________________________________________
dense_4 (Dense)              (None, 12)                156
_________________________________________________________________
activation_3 (Activation)    (None, 12)                0
_________________________________________________________________
dense_5 (Dense)              (None, 3)                 39
_________________________________________________________________
activation_4 (Activation)    (None, 3)                 0
=================================================================
Total params: 247.0
Trainable params: 247.0
Non-trainable params: 0.0
_________________________________________________________________
model = Sequential()
model.add(Dense(4, input_dim=train_x.shape[1]))
model.add(Activation('relu'))
model.add(Dense(output_dim=3))
model.add(Activation('softmax'))
model.compile(optimizer='adagrad', loss='binary_crossentropy', metrics=['accuracy'])
model.fit(train_x, train_y, nb_epoch=100)
/Users/sachin/anaconda/lib/python3.5/site-packages/ipykernel/__main__.py:4: UserWarning: Update your `Dense` call to the Keras 2 API: `Dense(units=3)`
/Users/sachin/anaconda/lib/python3.5/site-packages/keras/models.py:826: UserWarning: The `nb_epoch` argument in `fit` has been renamed `epochs`.
  warnings.warn('The `nb_epoch` argument in `fit` '
Epoch 1/100
1325/1325 [==============================] - 0s - loss: 0.6348 - acc: 0.6531
Epoch 2/100
1325/1325 [==============================] - 0s - loss: 0.6224 - acc: 0.6644
Epoch 3/100
1325/1325 [==============================] - 0s - loss: 0.6165 - acc: 0.6662
... [epochs 4-98 omitted: loss falls steadily from 0.6129 to 0.5835, acc rises from 0.6694 to 0.6913] ...
Epoch 99/100
1325/1325 [==============================] - 0s - loss: 0.5833 - acc: 0.6906
Epoch 100/100
1325/1325 [==============================] - 0s - loss: 0.5832 - acc: 0.6911
<keras.callbacks.History at 0x1227b34a8>
model.evaluate(test_x,test_y,batch_size=256)
148/148 [==============================] - 0s
[0.56608289480209351, 0.70945948362350464]
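Note that this 0.709 is not comparable to the ~0.59-0.61 scores above: with binary_crossentropy on a 3-way softmax, Keras reports per-output binary accuracy, which is systematically higher than 3-class categorical accuracy. For an apples-to-apples comparison, recompile with the categorical loss:
model.compile(optimizer='adagrad', loss='categorical_crossentropy', metrics=['accuracy'])
model.evaluate(test_x, test_y, batch_size=256)   # now reports true 3-class accuracy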
model.fit?