import numpy as np
import tensorflow as tf
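# Build a toy XOR dataset: n random boolean pairs, with the XOR of the two columns one-hot encoded as two boolean target columns.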
n = 1000
din = np.random.randint(0,2, (n, 2)).astype(bool)
out = np.zeros((n, 2), dtype="bool")
out[:, 1] = np.logical_xor(din[:, 0], din[:, 1])   # column 1: XOR is true
out[:, 0] = np.logical_not(out[:, 1])              # column 0: XOR is false
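# Boolean input placeholder, cast to float so it can be multiplied with the weight matrices.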
x = tf.placeholder("bool", shape=[None,2])
x1 = tf.cast(x, "float")
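# Hidden layer: 2 inputs -> 10 ReLU units.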
with tf.name_scope('Layer1'):
    W1 = tf.Variable(tf.random_normal([2, 10]))
    b1 = tf.Variable(tf.random_normal([10]))
    hidden = tf.nn.relu(tf.matmul(x1, W1) + b1)
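# Output layer: 10 hidden units -> 2 class logits.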
with tf.name_scope('Layer2'):
    W2 = tf.Variable(tf.random_normal([10, 2]))
    b2 = tf.Variable(tf.random_normal([2]))
    hidden1 = tf.matmul(hidden, W2) + b2
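# Softmax over the two classes; y_val is the predicted class index (argmax).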
with tf.name_scope('output'):
    y = tf.nn.softmax(hidden1)
    y_val = tf.arg_max(y, 1)
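# One-hot target placeholder and the summed cross-entropy loss.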
y_ = tf.placeholder("float",[None,2])
loss = tf.reduce_sum(-y_*tf.log(y))
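# Initializer, plain gradient-descent training step, and TensorBoard summaries (pre-1.0 TensorFlow API).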
init = tf.initialize_all_variables()
optimizer = tf.train.GradientDescentOptimizer(.001).minimize(loss)
tf.scalar_summary("cost", loss)
merged = tf.merge_all_summaries()
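# Train on the full dataset every step inside a session, logging summaries for TensorBoard.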
fd = {x: din, y_:out}
with tf.Session() as sess:
    sess.run(init)
    summary_writer = tf.train.SummaryWriter("./logs/run1")
    summary_writer.add_graph(y.graph)
    print(sess.run(loss, feed_dict=fd))
    for step in range(40):
        summ, cost_curr, _ = sess.run([merged, loss, optimizer], feed_dict={x: din, y_: out})
        if step % 10 == 0:
            print("Iteration", step, "Loss", cost_curr)
        summary_writer.add_summary(summ, step)
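    # Uncomment to print the predicted class for each of the four possible input pairs;
    # with the one-hot labels above, a well-trained XOR network should print 0, 0, 1, 1.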
    # print(sess.run(y_val, feed_dict={x: [[0, 0], [1, 1], [1, 0], [0, 1]]}))
3329.73
Iteration 0 Loss 3329.73
Iteration 10 Loss 277.95
Iteration 20 Loss 126.06
Iteration 30 Loss 72.0056