# These are all the modules we'll be using later. Make sure you can import them
# before proceeding further.
from __future__ import print_function
import numpy as np
import tensorflow as tf
from six.moves import cPickle as pickle
from six.moves import range
# Reload the notMNIST splits that an earlier assignment pickled to disk.
pickle_file = 'notMNIST.pickle'

with open(pickle_file, 'rb') as f:
    stored = pickle.load(f)
    train_dataset = stored['train_dataset']
    train_labels = stored['train_labels']
    valid_dataset = stored['valid_dataset']
    valid_labels = stored['valid_labels']
    test_dataset = stored['test_dataset']
    test_labels = stored['test_labels']
    del stored  # hint to help gc free up memory

print('Training set', train_dataset.shape, train_labels.shape)
print('Validation set', valid_dataset.shape, valid_labels.shape)
print('Test set', test_dataset.shape, test_labels.shape)
Training set (200000, 28, 28) (200000,) Validation set (10000, 28, 28) (10000,) Test set (10000, 28, 28) (10000,)
Reformat into a TensorFlow-friendly shape:
# Dataset geometry used by the reformat step and all the graphs below.
image_size = 28  # notMNIST images are 28x28 pixels
num_labels = 10  # ten letter classes
num_channels = 1 # grayscale
# NOTE(review): numpy is already imported at the top of the file; this repeat
# import is harmless and likely kept so the notebook cell runs standalone.
import numpy as np
def reformat(dataset, labels, image_size=28, num_labels=10, num_channels=1):
    """Reshape images to 4-D float32 and one-hot encode the labels.

    Args:
      dataset: array of shape (N, image_size, image_size), any numeric dtype.
      labels: 1-D array of N integer class ids in [0, num_labels).
      image_size: edge length of the square input images (default 28).
      num_labels: number of classes for the one-hot encoding (default 10).
      num_channels: number of image channels (default 1, grayscale).

    Returns:
      A (dataset, labels) pair: dataset as float32 of shape
      (N, image_size, image_size, num_channels), labels as float32 one-hot
      of shape (N, num_labels).
    """
    dataset = dataset.reshape(
        (-1, image_size, image_size, num_channels)).astype(np.float32)
    # Broadcasting: comparing each label against arange(num_labels) yields
    # one boolean one-hot row per example.
    labels = (np.arange(num_labels) == labels[:, None]).astype(np.float32)
    return dataset, labels
# Reformat all three splits in place (reassigns the module-level arrays to
# 4-D float32 images and one-hot float32 labels).
train_dataset, train_labels = reformat(train_dataset, train_labels)
valid_dataset, valid_labels = reformat(valid_dataset, valid_labels)
test_dataset, test_labels = reformat(test_dataset, test_labels)
print('Training set', train_dataset.shape, train_labels.shape)
print('Validation set', valid_dataset.shape, valid_labels.shape)
print('Test set', test_dataset.shape, test_labels.shape)
Training set (200000, 28, 28, 1) (200000, 10) Validation set (10000, 28, 28, 1) (10000, 10) Test set (10000, 28, 28, 1) (10000, 10)
def accuracy(predictions, labels):
    """Percentage of rows whose argmax prediction matches the one-hot label."""
    hits = np.argmax(predictions, 1) == np.argmax(labels, 1)
    return 100.0 * np.sum(hits) / predictions.shape[0]
Let's build a small network with two convolutional layers, followed by one fully connected layer. Convolutional networks are more expensive computationally, so we'll limit its depth and number of fully connected nodes.
# Hyperparameters for the stride-2 convolutional model below.
batch_size = 16   # examples per SGD minibatch
patch_size = 5    # spatial extent of the square conv filters
depth = 16        # feature maps per conv layer
num_hidden = 64   # units in the fully connected hidden layer
graph = tf.Graph()

with graph.as_default():
    # Input data: training minibatches are fed through placeholders; the
    # full validation/test splits are baked into the graph as constants.
    tf_train_dataset = tf.placeholder(
        tf.float32, shape=(batch_size, image_size, image_size, num_channels))
    tf_train_labels = tf.placeholder(tf.float32, shape=(batch_size, num_labels))
    tf_valid_dataset = tf.constant(valid_dataset)
    tf_test_dataset = tf.constant(test_dataset)

    # Variables: two 5x5 conv layers followed by two fully connected layers.
    layer1_weights = tf.Variable(tf.truncated_normal(
        [patch_size, patch_size, num_channels, depth], stddev=0.1))
    layer1_biases = tf.Variable(tf.zeros([depth]))
    layer2_weights = tf.Variable(tf.truncated_normal(
        [patch_size, patch_size, depth, depth], stddev=0.1))
    layer2_biases = tf.Variable(tf.constant(1.0, shape=[depth]))
    # Two stride-2 SAME convolutions shrink each spatial dim by a factor of
    # 4, so the flattened size is (image_size//4)^2 * depth.  The divisions
    # are parenthesized: the unparenthesized left-to-right form
    # image_size // 4 * image_size // 4 equals ((n//4)*n)//4, which only
    # coincides with (n//4)^2 when n is a multiple of 4 (as 28 is here).
    layer3_weights = tf.Variable(tf.truncated_normal(
        [(image_size // 4) * (image_size // 4) * depth, num_hidden],
        stddev=0.1))
    layer3_biases = tf.Variable(tf.constant(1.0, shape=[num_hidden]))
    layer4_weights = tf.Variable(tf.truncated_normal(
        [num_hidden, num_labels], stddev=0.1))
    layer4_biases = tf.Variable(tf.constant(1.0, shape=[num_labels]))

    def model(data):
        """Two stride-2 conv+ReLU layers, a hidden FC layer, then logits."""
        conv = tf.nn.conv2d(data, layer1_weights, [1, 2, 2, 1], padding='SAME')
        hidden = tf.nn.relu(conv + layer1_biases)
        conv = tf.nn.conv2d(hidden, layer2_weights, [1, 2, 2, 1], padding='SAME')
        hidden = tf.nn.relu(conv + layer2_biases)
        # Flatten (N, H, W, C) -> (N, H*W*C) for the fully connected layers.
        shape = hidden.get_shape().as_list()
        reshape = tf.reshape(hidden, [shape[0], shape[1] * shape[2] * shape[3]])
        hidden = tf.nn.relu(tf.matmul(reshape, layer3_weights) + layer3_biases)
        return tf.matmul(hidden, layer4_weights) + layer4_biases

    # Training computation.
    logits = model(tf_train_dataset)
    loss = tf.reduce_mean(
        tf.nn.softmax_cross_entropy_with_logits(labels=tf_train_labels,
                                                logits=logits))

    # Optimizer.
    optimizer = tf.train.GradientDescentOptimizer(0.05).minimize(loss)

    # Predictions for the training, validation, and test data.
    train_prediction = tf.nn.softmax(logits)
    valid_prediction = tf.nn.softmax(model(tf_valid_dataset))
    test_prediction = tf.nn.softmax(model(tf_test_dataset))
num_steps = 1001

with tf.Session(graph=graph) as session:
    # tf.initialize_all_variables() was deprecated in TF 0.12; the
    # keyword-only softmax_cross_entropy_with_logits call used by this
    # notebook already requires TF >= 1.0, where the supported spelling is
    # global_variables_initializer().
    tf.global_variables_initializer().run()
    print('Initialized')
    for step in range(num_steps):
        # Walk through the training data in sequential minibatches,
        # wrapping around via the modulo.
        offset = (step * batch_size) % (train_labels.shape[0] - batch_size)
        batch_data = train_dataset[offset:(offset + batch_size), :, :, :]
        batch_labels = train_labels[offset:(offset + batch_size), :]
        feed_dict = {tf_train_dataset: batch_data,
                     tf_train_labels: batch_labels}
        _, l, predictions = session.run(
            [optimizer, loss, train_prediction], feed_dict=feed_dict)
        if step % 50 == 0:
            print('Minibatch loss at step %d: %f' % (step, l))
            print('Minibatch accuracy: %.1f%%' % accuracy(predictions, batch_labels))
            print('Validation accuracy: %.1f%%' % accuracy(
                valid_prediction.eval(), valid_labels))
    print('Test accuracy: %.1f%%' % accuracy(test_prediction.eval(), test_labels))
Initialized Minibatch loss at step 0: 3.213093 Minibatch accuracy: 18.8% Validation accuracy: 10.0% Minibatch loss at step 50: 1.705882 Minibatch accuracy: 25.0% Validation accuracy: 44.3% Minibatch loss at step 100: 1.193937 Minibatch accuracy: 50.0% Validation accuracy: 62.9% Minibatch loss at step 150: 0.464993 Minibatch accuracy: 93.8% Validation accuracy: 72.7% Minibatch loss at step 200: 0.703133 Minibatch accuracy: 81.2% Validation accuracy: 77.4% Minibatch loss at step 250: 1.254208 Minibatch accuracy: 68.8% Validation accuracy: 77.5% Minibatch loss at step 300: 0.312940 Minibatch accuracy: 93.8% Validation accuracy: 79.2% Minibatch loss at step 350: 0.552538 Minibatch accuracy: 93.8% Validation accuracy: 76.3% Minibatch loss at step 400: 0.359237 Minibatch accuracy: 93.8% Validation accuracy: 80.2% Minibatch loss at step 450: 0.724704 Minibatch accuracy: 87.5% Validation accuracy: 78.5% Minibatch loss at step 500: 0.640203 Minibatch accuracy: 87.5% Validation accuracy: 80.4% Minibatch loss at step 550: 0.820833 Minibatch accuracy: 75.0% Validation accuracy: 80.8% Minibatch loss at step 600: 0.354834 Minibatch accuracy: 93.8% Validation accuracy: 81.7% Minibatch loss at step 650: 0.885277 Minibatch accuracy: 68.8% Validation accuracy: 80.8% Minibatch loss at step 700: 1.020606 Minibatch accuracy: 62.5% Validation accuracy: 81.5% Minibatch loss at step 750: 0.087126 Minibatch accuracy: 100.0% Validation accuracy: 82.5% Minibatch loss at step 800: 0.741018 Minibatch accuracy: 75.0% Validation accuracy: 80.8% Minibatch loss at step 850: 1.040641 Minibatch accuracy: 68.8% Validation accuracy: 81.2% Minibatch loss at step 900: 0.620474 Minibatch accuracy: 87.5% Validation accuracy: 82.6% Minibatch loss at step 950: 0.557300 Minibatch accuracy: 87.5% Validation accuracy: 82.7% Minibatch loss at step 1000: 0.389972 Minibatch accuracy: 87.5% Validation accuracy: 82.7% Test accuracy: 89.7%
The convolutional model above uses convolutions with stride 2 to reduce the dimensionality. Replace the strides by a max pooling operation (`tf.nn.max_pool()`) of stride 2 and kernel size 2.
# Hyperparameters for the max-pooling variant of the model.
batch_size = 16
patch_size = 5
depth = 16
num_hidden = 64

graph = tf.Graph()

with graph.as_default():
    # Input data: minibatch placeholders plus constant validation/test sets.
    tf_train_dataset = tf.placeholder(
        tf.float32, shape=(batch_size, image_size, image_size, num_channels))
    tf_train_labels = tf.placeholder(tf.float32, shape=(batch_size, num_labels))
    tf_valid_dataset = tf.constant(valid_dataset)
    tf_test_dataset = tf.constant(test_dataset)

    # Variables: same shapes as the stride-2 model — each 2x2 max-pool
    # halves the spatial dims, so two of them also divide image_size by 4.
    layer1_weights = tf.Variable(tf.truncated_normal(
        [patch_size, patch_size, num_channels, depth], stddev=0.1))
    layer1_biases = tf.Variable(tf.zeros([depth]))
    layer2_weights = tf.Variable(tf.truncated_normal(
        [patch_size, patch_size, depth, depth], stddev=0.1))
    layer2_biases = tf.Variable(tf.constant(1.0, shape=[depth]))
    # Parenthesized so the size is (image_size//4)^2 * depth rather than
    # the accidental left-to-right ((n//4)*n)//4 * depth.
    layer3_weights = tf.Variable(tf.truncated_normal(
        [(image_size // 4) * (image_size // 4) * depth, num_hidden],
        stddev=0.1))
    layer3_biases = tf.Variable(tf.constant(1.0, shape=[num_hidden]))
    layer4_weights = tf.Variable(tf.truncated_normal(
        [num_hidden, num_labels], stddev=0.1))
    layer4_biases = tf.Variable(tf.constant(1.0, shape=[num_labels]))

    def model(data):
        """conv+ReLU then 2x2 max-pool, twice; then FC+ReLU; then logits."""
        conv1 = tf.nn.conv2d(data, layer1_weights, [1, 1, 1, 1], padding='SAME')
        bias1 = tf.nn.relu(conv1 + layer1_biases)
        pool1 = tf.nn.max_pool(bias1, [1, 2, 2, 1], [1, 2, 2, 1], padding='SAME')
        conv2 = tf.nn.conv2d(pool1, layer2_weights, [1, 1, 1, 1], padding='SAME')
        bias2 = tf.nn.relu(conv2 + layer2_biases)
        pool2 = tf.nn.max_pool(bias2, [1, 2, 2, 1], [1, 2, 2, 1], padding='SAME')
        # Flatten (N, H, W, C) -> (N, H*W*C).
        shape = pool2.get_shape().as_list()
        reshape = tf.reshape(pool2, [shape[0], shape[1] * shape[2] * shape[3]])
        hidden = tf.nn.relu(tf.matmul(reshape, layer3_weights) + layer3_biases)
        return tf.matmul(hidden, layer4_weights) + layer4_biases

    # Training computation.  The cross-entropy call uses keyword arguments:
    # in TF >= 1.0 (already required by the keyword-form call earlier in
    # this notebook) the positional form softmax_cross_entropy_with_logits(
    # logits, labels) binds to a sentinel parameter and raises.
    logits = model(tf_train_dataset)
    loss = tf.reduce_mean(
        tf.nn.softmax_cross_entropy_with_logits(labels=tf_train_labels,
                                                logits=logits))

    # Optimizer.
    optimizer = tf.train.GradientDescentOptimizer(0.05).minimize(loss)

    # Predictions for the training, validation, and test data.
    train_prediction = tf.nn.softmax(logits)
    valid_prediction = tf.nn.softmax(model(tf_valid_dataset))
    test_prediction = tf.nn.softmax(model(tf_test_dataset))
num_steps = 1001

with tf.Session(graph=graph) as session:
    # global_variables_initializer replaces the deprecated
    # initialize_all_variables (removed after TF 0.12; this notebook's
    # keyword-form cross-entropy calls already require TF >= 1.0).
    tf.global_variables_initializer().run()
    print('Initialized')
    for step in range(num_steps):
        # Sequential minibatches with wraparound.
        offset = (step * batch_size) % (train_labels.shape[0] - batch_size)
        batch_data = train_dataset[offset:(offset + batch_size), :, :, :]
        batch_labels = train_labels[offset:(offset + batch_size), :]
        feed_dict = {tf_train_dataset: batch_data,
                     tf_train_labels: batch_labels}
        _, l, predictions = session.run(
            [optimizer, loss, train_prediction], feed_dict=feed_dict)
        if step % 50 == 0:
            print('Minibatch loss at step %d: %f' % (step, l))
            print('Minibatch accuracy: %.1f%%' % accuracy(predictions, batch_labels))
            print('Validation accuracy: %.1f%%' % accuracy(
                valid_prediction.eval(), valid_labels))
    print('Test accuracy: %.1f%%' % accuracy(test_prediction.eval(), test_labels))
Initialized Minibatch loss at step 0: 3.779494 Minibatch accuracy: 6.2% Validation accuracy: 10.0% Minibatch loss at step 50: 1.866151 Minibatch accuracy: 37.5% Validation accuracy: 40.1% Minibatch loss at step 100: 1.125046 Minibatch accuracy: 56.2% Validation accuracy: 56.2% Minibatch loss at step 150: 0.518518 Minibatch accuracy: 87.5% Validation accuracy: 75.0% Minibatch loss at step 200: 1.233413 Minibatch accuracy: 50.0% Validation accuracy: 76.1% Minibatch loss at step 250: 1.081354 Minibatch accuracy: 68.8% Validation accuracy: 77.6% Minibatch loss at step 300: 0.417454 Minibatch accuracy: 87.5% Validation accuracy: 79.8% Minibatch loss at step 350: 0.428833 Minibatch accuracy: 93.8% Validation accuracy: 79.4% Minibatch loss at step 400: 0.205477 Minibatch accuracy: 100.0% Validation accuracy: 81.0% Minibatch loss at step 450: 0.671585 Minibatch accuracy: 87.5% Validation accuracy: 79.6% Minibatch loss at step 500: 0.673592 Minibatch accuracy: 87.5% Validation accuracy: 81.7% Minibatch loss at step 550: 0.713905 Minibatch accuracy: 75.0% Validation accuracy: 82.2% Minibatch loss at step 600: 0.371910 Minibatch accuracy: 93.8% Validation accuracy: 82.8% Minibatch loss at step 650: 0.860517 Minibatch accuracy: 81.2% Validation accuracy: 82.7% Minibatch loss at step 700: 0.719865 Minibatch accuracy: 68.8% Validation accuracy: 81.2% Minibatch loss at step 750: 0.061006 Minibatch accuracy: 100.0% Validation accuracy: 83.5% Minibatch loss at step 800: 0.573218 Minibatch accuracy: 87.5% Validation accuracy: 83.5% Minibatch loss at step 850: 0.780923 Minibatch accuracy: 81.2% Validation accuracy: 83.6% Minibatch loss at step 900: 0.558760 Minibatch accuracy: 87.5% Validation accuracy: 84.0% Minibatch loss at step 950: 0.525676 Minibatch accuracy: 81.2% Validation accuracy: 84.1% Minibatch loss at step 1000: 0.283331 Minibatch accuracy: 87.5% Validation accuracy: 84.4% Test accuracy: 91.3%
The CNN below is loosely inspired by the LeNet5 architecture.
# Hyperparameters for the LeNet5-inspired model.
batch_size = 16
patch_size = 5
depth = 16
num_hidden = 64

graph = tf.Graph()

with graph.as_default():
    # Input data: minibatch placeholders plus constant validation/test sets.
    tf_train_dataset = tf.placeholder(
        tf.float32, shape=(batch_size, image_size, image_size, num_channels))
    tf_train_labels = tf.placeholder(tf.float32, shape=(batch_size, num_labels))
    tf_valid_dataset = tf.constant(valid_dataset)
    tf_test_dataset = tf.constant(test_dataset)

    # Variables.
    layer1_weights = tf.Variable(tf.truncated_normal(
        [patch_size, patch_size, num_channels, depth], stddev=0.1))
    layer1_biases = tf.Variable(tf.zeros([depth]))
    layer2_weights = tf.Variable(tf.truncated_normal(
        [patch_size, patch_size, depth, depth], stddev=0.1))
    layer2_biases = tf.Variable(tf.constant(1.0, shape=[depth]))
    # VALID convs shrink each dim by patch_size-1 and each pool halves it:
    # 28 -> 24 -> 12 -> 8 -> 4, so size3 = 4 here.
    size3 = ((image_size - patch_size + 1) // 2 - patch_size + 1) // 2
    layer3_weights = tf.Variable(tf.truncated_normal(
        [size3 * size3 * depth, num_hidden], stddev=0.1))
    layer3_biases = tf.Variable(tf.constant(1.0, shape=[num_hidden]))
    layer4_weights = tf.Variable(tf.truncated_normal(
        [num_hidden, num_labels], stddev=0.1))
    layer4_biases = tf.Variable(tf.constant(1.0, shape=[num_labels]))

    def model(data):
        """LeNet5-style stack: VALID convs with average pooling, then FC."""
        # C1 input 28 x 28
        conv1 = tf.nn.conv2d(data, layer1_weights, [1, 1, 1, 1], padding='VALID')
        bias1 = tf.nn.relu(conv1 + layer1_biases)
        # S2 input 24 x 24
        pool2 = tf.nn.avg_pool(bias1, [1, 2, 2, 1], [1, 2, 2, 1], padding='VALID')
        # C3 input 12 x 12
        conv3 = tf.nn.conv2d(pool2, layer2_weights, [1, 1, 1, 1], padding='VALID')
        bias3 = tf.nn.relu(conv3 + layer2_biases)
        # S4 input 8 x 8
        pool4 = tf.nn.avg_pool(bias3, [1, 2, 2, 1], [1, 2, 2, 1], padding='VALID')
        # F6 input 4 x 4
        shape = pool4.get_shape().as_list()
        reshape = tf.reshape(pool4, [shape[0], shape[1] * shape[2] * shape[3]])
        hidden = tf.nn.relu(tf.matmul(reshape, layer3_weights) + layer3_biases)
        return tf.matmul(hidden, layer4_weights) + layer4_biases

    # Training computation.  Keyword arguments are required here: in
    # TF >= 1.0 (already assumed by the keyword-form call earlier in this
    # notebook) the positional form binds to a sentinel and raises.
    logits = model(tf_train_dataset)
    loss = tf.reduce_mean(
        tf.nn.softmax_cross_entropy_with_logits(labels=tf_train_labels,
                                                logits=logits))

    # Optimizer.
    optimizer = tf.train.GradientDescentOptimizer(0.05).minimize(loss)

    # Predictions for the training, validation, and test data.
    train_prediction = tf.nn.softmax(logits)
    valid_prediction = tf.nn.softmax(model(tf_valid_dataset))
    test_prediction = tf.nn.softmax(model(tf_test_dataset))
num_steps = 20001

with tf.Session(graph=graph) as session:
    # global_variables_initializer replaces the deprecated
    # initialize_all_variables (this notebook's keyword-form cross-entropy
    # calls already require TF >= 1.0).
    tf.global_variables_initializer().run()
    print('Initialized')
    for step in range(num_steps):
        # Sequential minibatches with wraparound.
        offset = (step * batch_size) % (train_labels.shape[0] - batch_size)
        batch_data = train_dataset[offset:(offset + batch_size), :, :, :]
        batch_labels = train_labels[offset:(offset + batch_size), :]
        feed_dict = {tf_train_dataset: batch_data,
                     tf_train_labels: batch_labels}
        _, l, predictions = session.run(
            [optimizer, loss, train_prediction], feed_dict=feed_dict)
        if step % 50 == 0:
            print('Minibatch loss at step %d: %f' % (step, l))
            print('Minibatch accuracy: %.1f%%' % accuracy(predictions, batch_labels))
            print('Validation accuracy: %.1f%%' % accuracy(
                valid_prediction.eval(), valid_labels))
    print('Test accuracy: %.1f%%' % accuracy(test_prediction.eval(), test_labels))
Initialized Minibatch loss at step 0: 2.887526 Minibatch accuracy: 6.2% Validation accuracy: 10.0% Minibatch loss at step 50: 1.755834 Minibatch accuracy: 43.8% Validation accuracy: 49.1% Minibatch loss at step 100: 1.122573 Minibatch accuracy: 62.5% Validation accuracy: 59.9% Minibatch loss at step 150: 0.818305 Minibatch accuracy: 81.2% Validation accuracy: 68.2% Minibatch loss at step 200: 1.141408 Minibatch accuracy: 62.5% Validation accuracy: 70.2% Minibatch loss at step 250: 1.179383 Minibatch accuracy: 75.0% Validation accuracy: 74.6% Minibatch loss at step 300: 0.573698 Minibatch accuracy: 87.5% Validation accuracy: 75.9% Minibatch loss at step 350: 0.567827 Minibatch accuracy: 81.2% Validation accuracy: 73.7% Minibatch loss at step 400: 0.373525 Minibatch accuracy: 100.0% Validation accuracy: 78.5% Minibatch loss at step 450: 0.923118 Minibatch accuracy: 81.2% Validation accuracy: 77.4% Minibatch loss at step 500: 0.829145 Minibatch accuracy: 87.5% Validation accuracy: 78.0% Minibatch loss at step 550: 1.070729 Minibatch accuracy: 75.0% Validation accuracy: 78.0% Minibatch loss at step 600: 0.396518 Minibatch accuracy: 87.5% Validation accuracy: 79.6% Minibatch loss at step 650: 0.829448 Minibatch accuracy: 87.5% Validation accuracy: 79.4% Minibatch loss at step 700: 0.859195 Minibatch accuracy: 68.8% Validation accuracy: 80.2% Minibatch loss at step 750: 0.065276 Minibatch accuracy: 100.0% Validation accuracy: 80.4% Minibatch loss at step 800: 0.508514 Minibatch accuracy: 81.2% Validation accuracy: 80.8% Minibatch loss at step 850: 1.009475 Minibatch accuracy: 75.0% Validation accuracy: 79.8% Minibatch loss at step 900: 0.941263 Minibatch accuracy: 81.2% Validation accuracy: 81.1% Minibatch loss at step 950: 0.570730 Minibatch accuracy: 87.5% Validation accuracy: 81.1% Minibatch loss at step 1000: 0.480312 Minibatch accuracy: 81.2% Validation accuracy: 80.9% Minibatch loss at step 1050: 0.600223 Minibatch accuracy: 81.2% Validation accuracy: 80.8% 
Minibatch loss at step 1100: 0.612804 Minibatch accuracy: 75.0% Validation accuracy: 82.5% Minibatch loss at step 1150: 0.347645 Minibatch accuracy: 93.8% Validation accuracy: 80.9% Minibatch loss at step 1200: 1.044914 Minibatch accuracy: 75.0% Validation accuracy: 82.7% Minibatch loss at step 1250: 0.641687 Minibatch accuracy: 81.2% Validation accuracy: 82.5% Minibatch loss at step 1300: 0.314071 Minibatch accuracy: 93.8% Validation accuracy: 82.7% Minibatch loss at step 1350: 0.956139 Minibatch accuracy: 62.5% Validation accuracy: 82.1% Minibatch loss at step 1400: 0.257573 Minibatch accuracy: 93.8% Validation accuracy: 82.5% Minibatch loss at step 1450: 0.341947 Minibatch accuracy: 87.5% Validation accuracy: 83.4% Minibatch loss at step 1500: 0.527776 Minibatch accuracy: 81.2% Validation accuracy: 82.5% Minibatch loss at step 1550: 0.554325 Minibatch accuracy: 81.2% Validation accuracy: 82.9% Minibatch loss at step 1600: 0.881043 Minibatch accuracy: 81.2% Validation accuracy: 83.0% Minibatch loss at step 1650: 0.619974 Minibatch accuracy: 75.0% Validation accuracy: 82.4% Minibatch loss at step 1700: 0.640255 Minibatch accuracy: 87.5% Validation accuracy: 83.4% Minibatch loss at step 1750: 0.571080 Minibatch accuracy: 81.2% Validation accuracy: 83.2% Minibatch loss at step 1800: 0.540935 Minibatch accuracy: 81.2% Validation accuracy: 83.6% Minibatch loss at step 1850: 0.652062 Minibatch accuracy: 75.0% Validation accuracy: 84.3% Minibatch loss at step 1900: 0.286824 Minibatch accuracy: 87.5% Validation accuracy: 84.1% Minibatch loss at step 1950: 0.571306 Minibatch accuracy: 81.2% Validation accuracy: 84.5% Minibatch loss at step 2000: 0.085546 Minibatch accuracy: 100.0% Validation accuracy: 84.5% Minibatch loss at step 2050: 0.778835 Minibatch accuracy: 75.0% Validation accuracy: 84.2% Minibatch loss at step 2100: 0.305310 Minibatch accuracy: 93.8% Validation accuracy: 85.1% Minibatch loss at step 2150: 0.367425 Minibatch accuracy: 93.8% Validation accuracy: 
84.9% Minibatch loss at step 2200: 0.371174 Minibatch accuracy: 87.5% Validation accuracy: 84.4% Minibatch loss at step 2250: 0.613358 Minibatch accuracy: 81.2% Validation accuracy: 85.0% Minibatch loss at step 2300: 0.750378 Minibatch accuracy: 87.5% Validation accuracy: 84.2% Minibatch loss at step 2350: 0.418179 Minibatch accuracy: 87.5% Validation accuracy: 85.0% Minibatch loss at step 2400: 0.501598 Minibatch accuracy: 81.2% Validation accuracy: 84.4% Minibatch loss at step 2450: 0.581857 Minibatch accuracy: 81.2% Validation accuracy: 84.5% Minibatch loss at step 2500: 0.903965 Minibatch accuracy: 75.0% Validation accuracy: 84.2% Minibatch loss at step 2550: 0.426037 Minibatch accuracy: 87.5% Validation accuracy: 84.9% Minibatch loss at step 2600: 0.099809 Minibatch accuracy: 100.0% Validation accuracy: 85.3% Minibatch loss at step 2650: 0.328693 Minibatch accuracy: 93.8% Validation accuracy: 85.5% Minibatch loss at step 2700: 0.639310 Minibatch accuracy: 93.8% Validation accuracy: 85.3% Minibatch loss at step 2750: 1.238202 Minibatch accuracy: 75.0% Validation accuracy: 85.5% Minibatch loss at step 2800: 0.425977 Minibatch accuracy: 81.2% Validation accuracy: 85.5% Minibatch loss at step 2850: 0.074243 Minibatch accuracy: 100.0% Validation accuracy: 85.7% Minibatch loss at step 2900: 0.343937 Minibatch accuracy: 87.5% Validation accuracy: 85.2% Minibatch loss at step 2950: 0.420437 Minibatch accuracy: 93.8% Validation accuracy: 85.5% Minibatch loss at step 3000: 0.697705 Minibatch accuracy: 87.5% Validation accuracy: 85.7% Minibatch loss at step 3050: 0.445790 Minibatch accuracy: 93.8% Validation accuracy: 85.7% Minibatch loss at step 3100: 0.510697 Minibatch accuracy: 81.2% Validation accuracy: 84.9% Minibatch loss at step 3150: 0.656437 Minibatch accuracy: 81.2% Validation accuracy: 85.5% Minibatch loss at step 3200: 0.525945 Minibatch accuracy: 87.5% Validation accuracy: 85.3% Minibatch loss at step 3250: 0.379972 Minibatch accuracy: 87.5% Validation 
accuracy: 85.7% Minibatch loss at step 3300: 0.110951 Minibatch accuracy: 93.8% Validation accuracy: 86.5% Minibatch loss at step 3350: 0.242193 Minibatch accuracy: 93.8% Validation accuracy: 86.2% Minibatch loss at step 3400: 0.595421 Minibatch accuracy: 81.2% Validation accuracy: 86.3% Minibatch loss at step 3450: 0.388807 Minibatch accuracy: 93.8% Validation accuracy: 85.6% Minibatch loss at step 3500: 0.294891 Minibatch accuracy: 87.5% Validation accuracy: 85.8% Minibatch loss at step 3550: 0.295007 Minibatch accuracy: 93.8% Validation accuracy: 86.0% Minibatch loss at step 3600: 0.104769 Minibatch accuracy: 100.0% Validation accuracy: 86.2% Minibatch loss at step 3650: 0.843573 Minibatch accuracy: 75.0% Validation accuracy: 85.7% Minibatch loss at step 3700: 0.847604 Minibatch accuracy: 68.8% Validation accuracy: 86.2% Minibatch loss at step 3750: 0.677515 Minibatch accuracy: 81.2% Validation accuracy: 85.9% Minibatch loss at step 3800: 0.011699 Minibatch accuracy: 100.0% Validation accuracy: 86.2% Minibatch loss at step 3850: 0.731791 Minibatch accuracy: 81.2% Validation accuracy: 86.7% Minibatch loss at step 3900: 0.505476 Minibatch accuracy: 81.2% Validation accuracy: 85.7% Minibatch loss at step 3950: 0.017837 Minibatch accuracy: 100.0% Validation accuracy: 86.2% Minibatch loss at step 4000: 0.389282 Minibatch accuracy: 87.5% Validation accuracy: 86.4% Minibatch loss at step 4050: 0.880657 Minibatch accuracy: 75.0% Validation accuracy: 84.9% Minibatch loss at step 4100: 0.545353 Minibatch accuracy: 81.2% Validation accuracy: 86.7% Minibatch loss at step 4150: 1.049770 Minibatch accuracy: 68.8% Validation accuracy: 86.1% Minibatch loss at step 4200: 0.369591 Minibatch accuracy: 93.8% Validation accuracy: 85.9% Minibatch loss at step 4250: 0.612737 Minibatch accuracy: 75.0% Validation accuracy: 86.3% Minibatch loss at step 4300: 0.461530 Minibatch accuracy: 87.5% Validation accuracy: 86.3% Minibatch loss at step 4350: 0.200963 Minibatch accuracy: 93.8% 
Validation accuracy: 86.4% Minibatch loss at step 4400: 1.108174 Minibatch accuracy: 75.0% Validation accuracy: 86.5% Minibatch loss at step 4450: 0.509003 Minibatch accuracy: 81.2% Validation accuracy: 86.1% Minibatch loss at step 4500: 0.609005 Minibatch accuracy: 87.5% Validation accuracy: 86.8% Minibatch loss at step 4550: 0.372265 Minibatch accuracy: 87.5% Validation accuracy: 87.1% Minibatch loss at step 4600: 0.533304 Minibatch accuracy: 87.5% Validation accuracy: 86.2% Minibatch loss at step 4650: 0.773061 Minibatch accuracy: 81.2% Validation accuracy: 86.3% Minibatch loss at step 4700: 0.506120 Minibatch accuracy: 87.5% Validation accuracy: 87.0% Minibatch loss at step 4750: 0.928353 Minibatch accuracy: 68.8% Validation accuracy: 86.3% Minibatch loss at step 4800: 0.489047 Minibatch accuracy: 87.5% Validation accuracy: 86.7% Minibatch loss at step 4850: 0.354044 Minibatch accuracy: 93.8% Validation accuracy: 87.1% Minibatch loss at step 4900: 0.119216 Minibatch accuracy: 93.8% Validation accuracy: 87.0% Minibatch loss at step 4950: 0.178830 Minibatch accuracy: 93.8% Validation accuracy: 87.2% Minibatch loss at step 5000: 0.955608 Minibatch accuracy: 68.8% Validation accuracy: 86.2% Minibatch loss at step 5050: 0.237759 Minibatch accuracy: 93.8% Validation accuracy: 87.0% Minibatch loss at step 5100: 0.275851 Minibatch accuracy: 93.8% Validation accuracy: 87.0% Minibatch loss at step 5150: 0.449317 Minibatch accuracy: 87.5% Validation accuracy: 86.1% Minibatch loss at step 5200: 0.273233 Minibatch accuracy: 100.0% Validation accuracy: 86.6% Minibatch loss at step 5250: 0.154574 Minibatch accuracy: 100.0% Validation accuracy: 87.1% Minibatch loss at step 5300: 0.304150 Minibatch accuracy: 93.8% Validation accuracy: 87.0% Minibatch loss at step 5350: 0.274738 Minibatch accuracy: 87.5% Validation accuracy: 87.0% Minibatch loss at step 5400: 0.401083 Minibatch accuracy: 87.5% Validation accuracy: 87.2% Minibatch loss at step 5450: 0.334712 Minibatch accuracy: 
87.5% Validation accuracy: 86.7% Minibatch loss at step 5500: 0.447083 Minibatch accuracy: 87.5% Validation accuracy: 87.2% Minibatch loss at step 5550: 0.379631 Minibatch accuracy: 75.0% Validation accuracy: 86.6% Minibatch loss at step 5600: 0.286631 Minibatch accuracy: 87.5% Validation accuracy: 87.2% Minibatch loss at step 5650: 0.309757 Minibatch accuracy: 81.2% Validation accuracy: 87.2% Minibatch loss at step 5700: 0.507893 Minibatch accuracy: 87.5% Validation accuracy: 87.4% Minibatch loss at step 5750: 0.620230 Minibatch accuracy: 87.5% Validation accuracy: 87.2% Minibatch loss at step 5800: 0.169920 Minibatch accuracy: 93.8% Validation accuracy: 87.1% Minibatch loss at step 5850: 0.745497 Minibatch accuracy: 81.2% Validation accuracy: 87.1% Minibatch loss at step 5900: 0.767921 Minibatch accuracy: 75.0% Validation accuracy: 86.6% Minibatch loss at step 5950: 0.312666 Minibatch accuracy: 93.8% Validation accuracy: 87.2% Minibatch loss at step 6000: 0.168658 Minibatch accuracy: 93.8% Validation accuracy: 86.9% Minibatch loss at step 6050: 0.373202 Minibatch accuracy: 81.2% Validation accuracy: 87.2% Minibatch loss at step 6100: 0.774956 Minibatch accuracy: 75.0% Validation accuracy: 87.1% Minibatch loss at step 6150: 0.190329 Minibatch accuracy: 93.8% Validation accuracy: 87.4% Minibatch loss at step 6200: 1.050667 Minibatch accuracy: 75.0% Validation accuracy: 87.4% Minibatch loss at step 6250: 0.822543 Minibatch accuracy: 81.2% Validation accuracy: 87.1% Minibatch loss at step 6300: 0.708801 Minibatch accuracy: 75.0% Validation accuracy: 87.3% Minibatch loss at step 6350: 0.209116 Minibatch accuracy: 93.8% Validation accuracy: 87.3% Minibatch loss at step 6400: 0.129195 Minibatch accuracy: 93.8% Validation accuracy: 87.3% Minibatch loss at step 6450: 0.241793 Minibatch accuracy: 87.5% Validation accuracy: 87.4% Minibatch loss at step 6500: 0.820996 Minibatch accuracy: 68.8% Validation accuracy: 87.2% Minibatch loss at step 6550: 0.095217 Minibatch 
accuracy: 100.0% Validation accuracy: 87.4% Minibatch loss at step 6600: 0.252888 Minibatch accuracy: 93.8% Validation accuracy: 87.2% Minibatch loss at step 6650: 1.725186 Minibatch accuracy: 56.2% Validation accuracy: 87.2% Minibatch loss at step 6700: 0.191433 Minibatch accuracy: 93.8% Validation accuracy: 87.4% Minibatch loss at step 6750: 0.353232 Minibatch accuracy: 87.5% Validation accuracy: 87.5% Minibatch loss at step 6800: 0.726301 Minibatch accuracy: 87.5% Validation accuracy: 87.6% Minibatch loss at step 6850: 0.515651 Minibatch accuracy: 81.2% Validation accuracy: 87.5% Minibatch loss at step 6900: 0.452586 Minibatch accuracy: 81.2% Validation accuracy: 87.0% Minibatch loss at step 6950: 0.179482 Minibatch accuracy: 100.0% Validation accuracy: 87.2% Minibatch loss at step 7000: 0.931371 Minibatch accuracy: 68.8% Validation accuracy: 87.7% Minibatch loss at step 7050: 0.871163 Minibatch accuracy: 68.8% Validation accuracy: 87.3% Minibatch loss at step 7100: 0.498850 Minibatch accuracy: 81.2% Validation accuracy: 87.2% Minibatch loss at step 7150: 0.299375 Minibatch accuracy: 93.8% Validation accuracy: 87.1% Minibatch loss at step 7200: 0.470262 Minibatch accuracy: 81.2% Validation accuracy: 87.7% Minibatch loss at step 7250: 0.305438 Minibatch accuracy: 93.8% Validation accuracy: 87.7% Minibatch loss at step 7300: 0.586515 Minibatch accuracy: 81.2% Validation accuracy: 87.3% Minibatch loss at step 7350: 0.242167 Minibatch accuracy: 87.5% Validation accuracy: 87.1% Minibatch loss at step 7400: 0.011232 Minibatch accuracy: 100.0% Validation accuracy: 87.8% Minibatch loss at step 7450: 0.328229 Minibatch accuracy: 81.2% Validation accuracy: 87.5% Minibatch loss at step 7500: 0.261614 Minibatch accuracy: 93.8% Validation accuracy: 87.8% Minibatch loss at step 7550: 0.394588 Minibatch accuracy: 93.8% Validation accuracy: 87.3% Minibatch loss at step 7600: 0.519581 Minibatch accuracy: 81.2% Validation accuracy: 87.5% Minibatch loss at step 7650: 0.201451 
Minibatch accuracy: 93.8% Validation accuracy: 88.1% Minibatch loss at step 7700: 0.152650 Minibatch accuracy: 93.8% Validation accuracy: 87.1% Minibatch loss at step 7750: 0.329386 Minibatch accuracy: 87.5% Validation accuracy: 87.6% Minibatch loss at step 7800: 0.292810 Minibatch accuracy: 93.8% Validation accuracy: 87.6% Minibatch loss at step 7850: 0.450945 Minibatch accuracy: 87.5% Validation accuracy: 87.3% Minibatch loss at step 7900: 0.103688 Minibatch accuracy: 100.0% Validation accuracy: 87.7% Minibatch loss at step 7950: 0.825726 Minibatch accuracy: 68.8% Validation accuracy: 87.4% Minibatch loss at step 8000: 0.426394 Minibatch accuracy: 81.2% Validation accuracy: 87.6% Minibatch loss at step 8050: 0.299139 Minibatch accuracy: 87.5% Validation accuracy: 87.7% Minibatch loss at step 8100: 0.387193 Minibatch accuracy: 87.5% Validation accuracy: 87.5% Minibatch loss at step 8150: 0.923081 Minibatch accuracy: 81.2% Validation accuracy: 87.5% Minibatch loss at step 8200: 0.326014 Minibatch accuracy: 93.8% Validation accuracy: 87.3% Minibatch loss at step 8250: 0.321350 Minibatch accuracy: 93.8% Validation accuracy: 87.8% Minibatch loss at step 8300: 0.244986 Minibatch accuracy: 93.8% Validation accuracy: 87.8% Minibatch loss at step 8350: 0.455575 Minibatch accuracy: 87.5% Validation accuracy: 87.6% Minibatch loss at step 8400: 0.197823 Minibatch accuracy: 93.8% Validation accuracy: 88.0% Minibatch loss at step 8450: 0.207904 Minibatch accuracy: 93.8% Validation accuracy: 87.7% Minibatch loss at step 8500: 0.332120 Minibatch accuracy: 87.5% Validation accuracy: 87.6% Minibatch loss at step 8550: 0.314461 Minibatch accuracy: 87.5% Validation accuracy: 87.6% Minibatch loss at step 8600: 0.302965 Minibatch accuracy: 93.8% Validation accuracy: 88.0% Minibatch loss at step 8650: 0.540094 Minibatch accuracy: 81.2% Validation accuracy: 88.0% Minibatch loss at step 8700: 0.226378 Minibatch accuracy: 93.8% Validation accuracy: 87.9% Minibatch loss at step 8750: 
0.103386 Minibatch accuracy: 100.0% Validation accuracy: 87.6% Minibatch loss at step 8800: 0.180387 Minibatch accuracy: 93.8% Validation accuracy: 87.8% Minibatch loss at step 8850: 0.014346 Minibatch accuracy: 100.0% Validation accuracy: 87.2% Minibatch loss at step 8900: 0.449845 Minibatch accuracy: 81.2% Validation accuracy: 87.7% Minibatch loss at step 8950: 0.290058 Minibatch accuracy: 87.5% Validation accuracy: 87.9% Minibatch loss at step 9000: 0.387533 Minibatch accuracy: 93.8% Validation accuracy: 88.1% Minibatch loss at step 9050: 0.340914 Minibatch accuracy: 87.5% Validation accuracy: 88.0% Minibatch loss at step 9100: 0.406798 Minibatch accuracy: 87.5% Validation accuracy: 87.7% Minibatch loss at step 9150: 0.764048 Minibatch accuracy: 75.0% Validation accuracy: 87.2% Minibatch loss at step 9200: 0.293021 Minibatch accuracy: 87.5% Validation accuracy: 88.0% Minibatch loss at step 9250: 0.839284 Minibatch accuracy: 75.0% Validation accuracy: 87.9% Minibatch loss at step 9300: 0.926702 Minibatch accuracy: 81.2% Validation accuracy: 87.5% Minibatch loss at step 9350: 0.294565 Minibatch accuracy: 87.5% Validation accuracy: 87.4% Minibatch loss at step 9400: 0.348141 Minibatch accuracy: 87.5% Validation accuracy: 87.9% Minibatch loss at step 9450: 0.339952 Minibatch accuracy: 93.8% Validation accuracy: 87.9% Minibatch loss at step 9500: 0.244894 Minibatch accuracy: 93.8% Validation accuracy: 87.9% Minibatch loss at step 9550: 0.226825 Minibatch accuracy: 100.0% Validation accuracy: 88.2% Minibatch loss at step 9600: 0.356070 Minibatch accuracy: 87.5% Validation accuracy: 88.0% Minibatch loss at step 9650: 0.401066 Minibatch accuracy: 81.2% Validation accuracy: 87.8% Minibatch loss at step 9700: 0.234334 Minibatch accuracy: 93.8% Validation accuracy: 88.0% Minibatch loss at step 9750: 0.159659 Minibatch accuracy: 93.8% Validation accuracy: 87.9% Minibatch loss at step 9800: 0.380945 Minibatch accuracy: 87.5% Validation accuracy: 87.5% Minibatch loss at step 
9850: 0.251576 Minibatch accuracy: 93.8% Validation accuracy: 87.4% Minibatch loss at step 9900: 0.566218 Minibatch accuracy: 81.2% Validation accuracy: 88.0% Minibatch loss at step 9950: 0.279412 Minibatch accuracy: 87.5% Validation accuracy: 88.3% Minibatch loss at step 10000: 0.215329 Minibatch accuracy: 93.8% Validation accuracy: 88.1% Minibatch loss at step 10050: 0.019123 Minibatch accuracy: 100.0% Validation accuracy: 88.1% Minibatch loss at step 10100: 0.356180 Minibatch accuracy: 87.5% Validation accuracy: 88.3% Minibatch loss at step 10150: 0.567362 Minibatch accuracy: 81.2% Validation accuracy: 88.1% Minibatch loss at step 10200: 0.189304 Minibatch accuracy: 93.8% Validation accuracy: 88.4% Minibatch loss at step 10250: 0.446280 Minibatch accuracy: 87.5% Validation accuracy: 88.0% Minibatch loss at step 10300: 0.126306 Minibatch accuracy: 93.8% Validation accuracy: 88.0% Minibatch loss at step 10350: 0.471321 Minibatch accuracy: 87.5% Validation accuracy: 88.1% Minibatch loss at step 10400: 0.290378 Minibatch accuracy: 87.5% Validation accuracy: 88.1% Minibatch loss at step 10450: 0.317353 Minibatch accuracy: 87.5% Validation accuracy: 88.0% Minibatch loss at step 10500: 0.340341 Minibatch accuracy: 81.2% Validation accuracy: 87.9% Minibatch loss at step 10550: 0.853542 Minibatch accuracy: 75.0% Validation accuracy: 88.1% Minibatch loss at step 10600: 0.720329 Minibatch accuracy: 81.2% Validation accuracy: 88.0% Minibatch loss at step 10650: 0.351055 Minibatch accuracy: 87.5% Validation accuracy: 88.0% Minibatch loss at step 10700: 0.048014 Minibatch accuracy: 100.0% Validation accuracy: 88.1% Minibatch loss at step 10750: 0.276408 Minibatch accuracy: 93.8% Validation accuracy: 87.4% Minibatch loss at step 10800: 0.477006 Minibatch accuracy: 81.2% Validation accuracy: 88.1% Minibatch loss at step 10850: 0.774067 Minibatch accuracy: 87.5% Validation accuracy: 87.7% Minibatch loss at step 10900: 0.237711 Minibatch accuracy: 87.5% Validation accuracy: 88.4% 
Minibatch loss at step 10950: 0.447895 Minibatch accuracy: 81.2% Validation accuracy: 88.0% Minibatch loss at step 11000: 0.093522 Minibatch accuracy: 93.8% Validation accuracy: 87.8% Minibatch loss at step 11050: 0.410395 Minibatch accuracy: 93.8% Validation accuracy: 88.1% Minibatch loss at step 11100: 0.094415 Minibatch accuracy: 100.0% Validation accuracy: 87.5% Minibatch loss at step 11150: 0.322285 Minibatch accuracy: 87.5% Validation accuracy: 88.1% Minibatch loss at step 11200: 0.145736 Minibatch accuracy: 93.8% Validation accuracy: 88.2% Minibatch loss at step 11250: 0.962268 Minibatch accuracy: 62.5% Validation accuracy: 88.4% Minibatch loss at step 11300: 0.364485 Minibatch accuracy: 87.5% Validation accuracy: 88.4% Minibatch loss at step 11350: 0.390612 Minibatch accuracy: 81.2% Validation accuracy: 88.2% Minibatch loss at step 11400: 0.284497 Minibatch accuracy: 93.8% Validation accuracy: 88.2% Minibatch loss at step 11450: 0.343720 Minibatch accuracy: 93.8% Validation accuracy: 87.9% Minibatch loss at step 11500: 0.392723 Minibatch accuracy: 87.5% Validation accuracy: 88.1% Minibatch loss at step 11550: 0.357363 Minibatch accuracy: 87.5% Validation accuracy: 87.9% Minibatch loss at step 11600: 0.417761 Minibatch accuracy: 81.2% Validation accuracy: 88.4% Minibatch loss at step 11650: 0.378388 Minibatch accuracy: 87.5% Validation accuracy: 88.4% Minibatch loss at step 11700: 0.893325 Minibatch accuracy: 62.5% Validation accuracy: 88.2% Minibatch loss at step 11750: 0.508242 Minibatch accuracy: 81.2% Validation accuracy: 88.4% Minibatch loss at step 11800: 0.019753 Minibatch accuracy: 100.0% Validation accuracy: 87.7% Minibatch loss at step 11850: 0.637589 Minibatch accuracy: 93.8% Validation accuracy: 88.2% Minibatch loss at step 11900: 0.371976 Minibatch accuracy: 93.8% Validation accuracy: 88.2% Minibatch loss at step 11950: 0.838995 Minibatch accuracy: 62.5% Validation accuracy: 88.4% Minibatch loss at step 12000: 0.444190 Minibatch accuracy: 87.5% 
Validation accuracy: 88.0% Minibatch loss at step 12050: 0.038973 Minibatch accuracy: 100.0% Validation accuracy: 88.5% Minibatch loss at step 12100: 0.501612 Minibatch accuracy: 81.2% Validation accuracy: 88.2% Minibatch loss at step 12150: 0.247185 Minibatch accuracy: 93.8% Validation accuracy: 88.5% Minibatch loss at step 12200: 0.328059 Minibatch accuracy: 87.5% Validation accuracy: 88.3% Minibatch loss at step 12250: 0.381155 Minibatch accuracy: 87.5% Validation accuracy: 88.2% Minibatch loss at step 12300: 0.291867 Minibatch accuracy: 93.8% Validation accuracy: 88.3% Minibatch loss at step 12350: 0.758298 Minibatch accuracy: 87.5% Validation accuracy: 88.4% Minibatch loss at step 12400: 0.018543 Minibatch accuracy: 100.0% Validation accuracy: 88.2% Minibatch loss at step 12450: 0.952901 Minibatch accuracy: 75.0% Validation accuracy: 88.3% Minibatch loss at step 12500: 0.452856 Minibatch accuracy: 87.5% Validation accuracy: 88.3% Minibatch loss at step 12550: 0.775165 Minibatch accuracy: 75.0% Validation accuracy: 87.5% Minibatch loss at step 12600: 0.543465 Minibatch accuracy: 75.0% Validation accuracy: 88.1% Minibatch loss at step 12650: 0.423330 Minibatch accuracy: 93.8% Validation accuracy: 87.5% Minibatch loss at step 12700: 0.339476 Minibatch accuracy: 87.5% Validation accuracy: 87.9% Minibatch loss at step 12750: 0.207814 Minibatch accuracy: 93.8% Validation accuracy: 88.4% Minibatch loss at step 12800: 0.152592 Minibatch accuracy: 93.8% Validation accuracy: 88.2% Minibatch loss at step 12850: 0.250027 Minibatch accuracy: 93.8% Validation accuracy: 88.0% Minibatch loss at step 12900: 0.150702 Minibatch accuracy: 100.0% Validation accuracy: 88.1% Minibatch loss at step 12950: 0.036315 Minibatch accuracy: 100.0% Validation accuracy: 88.0% Minibatch loss at step 13000: 0.224514 Minibatch accuracy: 87.5% Validation accuracy: 88.3% Minibatch loss at step 13050: 0.276813 Minibatch accuracy: 93.8% Validation accuracy: 88.1% Minibatch loss at step 13100: 
0.267251 Minibatch accuracy: 93.8% Validation accuracy: 88.4% Minibatch loss at step 13150: 0.248624 Minibatch accuracy: 87.5% Validation accuracy: 88.2% Minibatch loss at step 13200: 0.317007 Minibatch accuracy: 87.5% Validation accuracy: 88.4% Minibatch loss at step 13250: 0.949058 Minibatch accuracy: 87.5% Validation accuracy: 88.4% Minibatch loss at step 13300: 0.298962 Minibatch accuracy: 93.8% Validation accuracy: 88.4% Minibatch loss at step 13350: 0.106796 Minibatch accuracy: 100.0% Validation accuracy: 88.5% Minibatch loss at step 13400: 0.471004 Minibatch accuracy: 87.5% Validation accuracy: 88.5% Minibatch loss at step 13450: 0.339622 Minibatch accuracy: 81.2% Validation accuracy: 88.4% Minibatch loss at step 13500: 0.388597 Minibatch accuracy: 87.5% Validation accuracy: 88.2% Minibatch loss at step 13550: 0.395972 Minibatch accuracy: 93.8% Validation accuracy: 88.2% Minibatch loss at step 13600: 0.326045 Minibatch accuracy: 87.5% Validation accuracy: 88.6% Minibatch loss at step 13650: 0.497251 Minibatch accuracy: 87.5% Validation accuracy: 88.4% Minibatch loss at step 13700: 0.386662 Minibatch accuracy: 81.2% Validation accuracy: 88.5% Minibatch loss at step 13750: 0.576945 Minibatch accuracy: 81.2% Validation accuracy: 88.6% Minibatch loss at step 13800: 0.047079 Minibatch accuracy: 100.0% Validation accuracy: 88.0% Minibatch loss at step 13850: 0.130557 Minibatch accuracy: 93.8% Validation accuracy: 88.1% Minibatch loss at step 13900: 0.168503 Minibatch accuracy: 93.8% Validation accuracy: 88.5% Minibatch loss at step 13950: 0.453997 Minibatch accuracy: 81.2% Validation accuracy: 88.5% Minibatch loss at step 14000: 0.054875 Minibatch accuracy: 100.0% Validation accuracy: 88.5% Minibatch loss at step 14050: 0.387485 Minibatch accuracy: 87.5% Validation accuracy: 87.7% Minibatch loss at step 14100: 0.558513 Minibatch accuracy: 93.8% Validation accuracy: 88.3% Minibatch loss at step 14150: 0.281757 Minibatch accuracy: 93.8% Validation accuracy: 88.3% 
Minibatch loss at step 14200: 0.194573 Minibatch accuracy: 100.0% Validation accuracy: 87.6% Minibatch loss at step 14250: 0.234614 Minibatch accuracy: 93.8% Validation accuracy: 88.5% Minibatch loss at step 14300: 0.405016 Minibatch accuracy: 93.8% Validation accuracy: 88.6% Minibatch loss at step 14350: 0.146940 Minibatch accuracy: 87.5% Validation accuracy: 88.6% Minibatch loss at step 14400: 0.295947 Minibatch accuracy: 93.8% Validation accuracy: 88.2% Minibatch loss at step 14450: 0.073702 Minibatch accuracy: 100.0% Validation accuracy: 88.6% Minibatch loss at step 14500: 0.348648 Minibatch accuracy: 87.5% Validation accuracy: 88.2% Minibatch loss at step 14550: 0.555093 Minibatch accuracy: 81.2% Validation accuracy: 88.2% Minibatch loss at step 14600: 0.047173 Minibatch accuracy: 100.0% Validation accuracy: 87.8% Minibatch loss at step 14650: 0.611663 Minibatch accuracy: 81.2% Validation accuracy: 88.5% Minibatch loss at step 14700: 0.310703 Minibatch accuracy: 93.8% Validation accuracy: 88.6% Minibatch loss at step 14750: 0.285280 Minibatch accuracy: 93.8% Validation accuracy: 88.6% Minibatch loss at step 14800: 0.477825 Minibatch accuracy: 87.5% Validation accuracy: 88.2% Minibatch loss at step 14850: 0.301898 Minibatch accuracy: 87.5% Validation accuracy: 88.6% Minibatch loss at step 14900: 0.266927 Minibatch accuracy: 93.8% Validation accuracy: 88.6% Minibatch loss at step 14950: 0.401053 Minibatch accuracy: 93.8% Validation accuracy: 88.5% Minibatch loss at step 15000: 0.300880 Minibatch accuracy: 100.0% Validation accuracy: 88.3% Minibatch loss at step 15050: 0.685438 Minibatch accuracy: 81.2% Validation accuracy: 88.8% Minibatch loss at step 15100: 0.189911 Minibatch accuracy: 93.8% Validation accuracy: 88.7% Minibatch loss at step 15150: 0.255015 Minibatch accuracy: 93.8% Validation accuracy: 88.6% Minibatch loss at step 15200: 0.083468 Minibatch accuracy: 100.0% Validation accuracy: 88.4% Minibatch loss at step 15250: 0.641439 Minibatch accuracy: 
75.0% Validation accuracy: 88.3% Minibatch loss at step 15300: 0.183658 Minibatch accuracy: 93.8% Validation accuracy: 88.5% Minibatch loss at step 15350: 0.020951 Minibatch accuracy: 100.0% Validation accuracy: 88.5% Minibatch loss at step 15400: 0.783557 Minibatch accuracy: 75.0% Validation accuracy: 88.5% Minibatch loss at step 15450: 0.691905 Minibatch accuracy: 81.2% Validation accuracy: 88.3% Minibatch loss at step 15500: 0.161925 Minibatch accuracy: 93.8% Validation accuracy: 88.8% Minibatch loss at step 15550: 0.345921 Minibatch accuracy: 87.5% Validation accuracy: 88.4% Minibatch loss at step 15600: 0.448692 Minibatch accuracy: 87.5% Validation accuracy: 88.3% Minibatch loss at step 15650: 0.473705 Minibatch accuracy: 87.5% Validation accuracy: 88.6% Minibatch loss at step 15700: 0.084393 Minibatch accuracy: 100.0% Validation accuracy: 88.6% Minibatch loss at step 15750: 0.207775 Minibatch accuracy: 93.8% Validation accuracy: 88.6% Minibatch loss at step 15800: 0.126755 Minibatch accuracy: 100.0% Validation accuracy: 88.6% Minibatch loss at step 15850: 0.267497 Minibatch accuracy: 93.8% Validation accuracy: 88.9% Minibatch loss at step 15900: 0.642480 Minibatch accuracy: 81.2% Validation accuracy: 89.0% Minibatch loss at step 15950: 0.096726 Minibatch accuracy: 93.8% Validation accuracy: 88.3% Minibatch loss at step 16000: 1.024041 Minibatch accuracy: 87.5% Validation accuracy: 88.5% Minibatch loss at step 16050: 0.080016 Minibatch accuracy: 100.0% Validation accuracy: 88.3% Minibatch loss at step 16100: 0.323796 Minibatch accuracy: 87.5% Validation accuracy: 88.5% Minibatch loss at step 16150: 0.119888 Minibatch accuracy: 100.0% Validation accuracy: 88.5% Minibatch loss at step 16200: 0.465717 Minibatch accuracy: 81.2% Validation accuracy: 88.3% Minibatch loss at step 16250: 0.177272 Minibatch accuracy: 100.0% Validation accuracy: 88.5% Minibatch loss at step 16300: 0.465884 Minibatch accuracy: 87.5% Validation accuracy: 88.5% Minibatch loss at step 
16350: 0.391384 Minibatch accuracy: 81.2% Validation accuracy: 88.6% Minibatch loss at step 16400: 0.658646 Minibatch accuracy: 75.0% Validation accuracy: 88.7% Minibatch loss at step 16450: 0.640159 Minibatch accuracy: 81.2% Validation accuracy: 88.4% Minibatch loss at step 16500: 0.026741 Minibatch accuracy: 100.0% Validation accuracy: 88.5% Minibatch loss at step 16550: 0.468924 Minibatch accuracy: 81.2% Validation accuracy: 88.5% Minibatch loss at step 16600: 0.337119 Minibatch accuracy: 87.5% Validation accuracy: 88.7% Minibatch loss at step 16650: 0.614522 Minibatch accuracy: 75.0% Validation accuracy: 88.4% Minibatch loss at step 16700: 0.511785 Minibatch accuracy: 81.2% Validation accuracy: 88.8% Minibatch loss at step 16750: 0.521505 Minibatch accuracy: 81.2% Validation accuracy: 88.6% Minibatch loss at step 16800: 0.644770 Minibatch accuracy: 81.2% Validation accuracy: 88.6% Minibatch loss at step 16850: 0.742855 Minibatch accuracy: 75.0% Validation accuracy: 88.7% Minibatch loss at step 16900: 0.931507 Minibatch accuracy: 68.8% Validation accuracy: 88.6% Minibatch loss at step 16950: 0.210255 Minibatch accuracy: 93.8% Validation accuracy: 88.4% Minibatch loss at step 17000: 0.183624 Minibatch accuracy: 100.0% Validation accuracy: 88.7% Minibatch loss at step 17050: 0.308766 Minibatch accuracy: 93.8% Validation accuracy: 88.8% Minibatch loss at step 17100: 0.225872 Minibatch accuracy: 87.5% Validation accuracy: 88.7% Minibatch loss at step 17150: 0.325996 Minibatch accuracy: 93.8% Validation accuracy: 88.7% Minibatch loss at step 17200: 0.230195 Minibatch accuracy: 87.5% Validation accuracy: 88.8% Minibatch loss at step 17250: 0.547959 Minibatch accuracy: 87.5% Validation accuracy: 88.8% Minibatch loss at step 17300: 0.469168 Minibatch accuracy: 93.8% Validation accuracy: 88.9% Minibatch loss at step 17350: 0.212685 Minibatch accuracy: 93.8% Validation accuracy: 89.2% Minibatch loss at step 17400: 0.546534 Minibatch accuracy: 81.2% Validation accuracy: 
88.5% Minibatch loss at step 17450: 0.272410 Minibatch accuracy: 87.5% Validation accuracy: 89.0% Minibatch loss at step 17500: 0.317612 Minibatch accuracy: 87.5% Validation accuracy: 88.5% Minibatch loss at step 17550: 0.484892 Minibatch accuracy: 87.5% Validation accuracy: 88.9% Minibatch loss at step 17600: 0.646033 Minibatch accuracy: 81.2% Validation accuracy: 88.8% Minibatch loss at step 17650: 0.209862 Minibatch accuracy: 93.8% Validation accuracy: 88.6% Minibatch loss at step 17700: 0.484305 Minibatch accuracy: 87.5% Validation accuracy: 88.7% Minibatch loss at step 17750: 0.251212 Minibatch accuracy: 93.8% Validation accuracy: 89.0% Minibatch loss at step 17800: 0.535176 Minibatch accuracy: 87.5% Validation accuracy: 88.8% Minibatch loss at step 17850: 0.298966 Minibatch accuracy: 87.5% Validation accuracy: 88.7% Minibatch loss at step 17900: 0.374506 Minibatch accuracy: 87.5% Validation accuracy: 89.1% Minibatch loss at step 17950: 0.356499 Minibatch accuracy: 87.5% Validation accuracy: 88.8% Minibatch loss at step 18000: 0.334625 Minibatch accuracy: 81.2% Validation accuracy: 88.2% Minibatch loss at step 18050: 0.343583 Minibatch accuracy: 87.5% Validation accuracy: 88.8% Minibatch loss at step 18100: 0.647946 Minibatch accuracy: 87.5% Validation accuracy: 89.2% Minibatch loss at step 18150: 0.221066 Minibatch accuracy: 87.5% Validation accuracy: 89.1% Minibatch loss at step 18200: 0.406646 Minibatch accuracy: 93.8% Validation accuracy: 89.0% Minibatch loss at step 18250: 0.407224 Minibatch accuracy: 81.2% Validation accuracy: 89.1% Minibatch loss at step 18300: 0.115090 Minibatch accuracy: 100.0% Validation accuracy: 88.9% Minibatch loss at step 18350: 0.321624 Minibatch accuracy: 87.5% Validation accuracy: 89.0% Minibatch loss at step 18400: 0.731389 Minibatch accuracy: 75.0% Validation accuracy: 88.5% Minibatch loss at step 18450: 0.288471 Minibatch accuracy: 93.8% Validation accuracy: 89.1% Minibatch loss at step 18500: 0.242199 Minibatch accuracy: 
93.8% Validation accuracy: 88.6% Minibatch loss at step 18550: 0.298285 Minibatch accuracy: 81.2% Validation accuracy: 89.1% Minibatch loss at step 18600: 0.080742 Minibatch accuracy: 100.0% Validation accuracy: 88.8% Minibatch loss at step 18650: 0.327417 Minibatch accuracy: 87.5% Validation accuracy: 89.1% Minibatch loss at step 18700: 0.129839 Minibatch accuracy: 100.0% Validation accuracy: 89.1% Minibatch loss at step 18750: 0.682693 Minibatch accuracy: 75.0% Validation accuracy: 88.9% Minibatch loss at step 18800: 0.382109 Minibatch accuracy: 81.2% Validation accuracy: 89.0% Minibatch loss at step 18850: 0.477357 Minibatch accuracy: 81.2% Validation accuracy: 89.2% Minibatch loss at step 18900: 0.015490 Minibatch accuracy: 100.0% Validation accuracy: 89.4% Minibatch loss at step 18950: 0.127917 Minibatch accuracy: 100.0% Validation accuracy: 89.1% Minibatch loss at step 19000: 0.568930 Minibatch accuracy: 81.2% Validation accuracy: 88.8% Minibatch loss at step 19050: 0.059831 Minibatch accuracy: 100.0% Validation accuracy: 89.0% Minibatch loss at step 19100: 0.014571 Minibatch accuracy: 100.0% Validation accuracy: 89.1% Minibatch loss at step 19150: 0.449375 Minibatch accuracy: 93.8% Validation accuracy: 88.8% Minibatch loss at step 19200: 0.294453 Minibatch accuracy: 93.8% Validation accuracy: 88.8% Minibatch loss at step 19250: 0.913588 Minibatch accuracy: 87.5% Validation accuracy: 89.0% Minibatch loss at step 19300: 0.226687 Minibatch accuracy: 93.8% Validation accuracy: 89.1% Minibatch loss at step 19350: 0.138060 Minibatch accuracy: 100.0% Validation accuracy: 88.7% Minibatch loss at step 19400: 0.512782 Minibatch accuracy: 81.2% Validation accuracy: 89.1% Minibatch loss at step 19450: 0.192825 Minibatch accuracy: 93.8% Validation accuracy: 89.0% Minibatch loss at step 19500: 0.244256 Minibatch accuracy: 93.8% Validation accuracy: 89.0% Minibatch loss at step 19550: 0.229286 Minibatch accuracy: 93.8% Validation accuracy: 89.1% Minibatch loss at step 
19600: 0.274848 Minibatch accuracy: 93.8% Validation accuracy: 89.0% Minibatch loss at step 19650: 0.315529 Minibatch accuracy: 87.5% Validation accuracy: 88.9% Minibatch loss at step 19700: 0.079131 Minibatch accuracy: 100.0% Validation accuracy: 89.0% Minibatch loss at step 19750: 0.500986 Minibatch accuracy: 87.5% Validation accuracy: 89.1% Minibatch loss at step 19800: 0.255140 Minibatch accuracy: 87.5% Validation accuracy: 89.4% Minibatch loss at step 19850: 0.130912 Minibatch accuracy: 100.0% Validation accuracy: 89.2% Minibatch loss at step 19900: 0.186173 Minibatch accuracy: 93.8% Validation accuracy: 89.6% Minibatch loss at step 19950: 0.748589 Minibatch accuracy: 62.5% Validation accuracy: 89.3% Minibatch loss at step 20000: 0.276946 Minibatch accuracy: 93.8% Validation accuracy: 89.1% Test accuracy: 95.0%
The accuracy is good, but not as good as the 3-layer network from the previous assignment.
The next version of the net uses dropout and learning rate decay:
# Hyperparameters for the dropout + learning-rate-decay variant.
batch_size = 16
patch_size = 5    # convolution kernel size (5x5)
depth = 16        # feature maps per conv layer
num_hidden = 64   # units in each fully connected layer
beta_regul = 1e-3  # NOTE(review): declared but never used below — L2 regularization was not wired in
drop_out = 0.5    # keep probability for dropout during training

graph = tf.Graph()
with graph.as_default():

    # Input data.
    tf_train_dataset = tf.placeholder(
        tf.float32, shape=(batch_size, image_size, image_size, num_channels))
    tf_train_labels = tf.placeholder(tf.float32, shape=(batch_size, num_labels))
    tf_valid_dataset = tf.constant(valid_dataset)
    tf_test_dataset = tf.constant(test_dataset)
    global_step = tf.Variable(0)  # incremented by the optimizer; drives LR decay

    # Variables.
    layer1_weights = tf.Variable(tf.truncated_normal(
        [patch_size, patch_size, num_channels, depth], stddev=0.1))
    layer1_biases = tf.Variable(tf.zeros([depth]))
    layer2_weights = tf.Variable(tf.truncated_normal(
        [patch_size, patch_size, depth, depth], stddev=0.1))
    layer2_biases = tf.Variable(tf.constant(1.0, shape=[depth]))
    # Spatial size after two VALID 5x5 convs, each followed by a 2x2 pool:
    # ((28 - 5 + 1) // 2 - 5 + 1) // 2 = 4.
    size3 = ((image_size - patch_size + 1) // 2 - patch_size + 1) // 2
    layer3_weights = tf.Variable(tf.truncated_normal(
        [size3 * size3 * depth, num_hidden], stddev=0.1))
    layer3_biases = tf.Variable(tf.constant(1.0, shape=[num_hidden]))
    layer4_weights = tf.Variable(tf.truncated_normal(
        [num_hidden, num_hidden], stddev=0.1))
    layer4_biases = tf.Variable(tf.constant(1.0, shape=[num_hidden]))
    layer5_weights = tf.Variable(tf.truncated_normal(
        [num_hidden, num_labels], stddev=0.1))
    layer5_biases = tf.Variable(tf.constant(1.0, shape=[num_labels]))

    # Model.
    def model(data, keep_prob):
        """LeNet-style forward pass: two conv + avg-pool stages, two fully
        connected layers with dropout, then a linear output layer.

        keep_prob=1.0 disables dropout (used for validation/test graphs).
        """
        # C1 input 28 x 28 -> 24 x 24 (5x5 VALID convolution)
        conv1 = tf.nn.conv2d(data, layer1_weights, [1, 1, 1, 1], padding='VALID')
        bias1 = tf.nn.relu(conv1 + layer1_biases)
        # S2 input 24 x 24 -> 12 x 12 (2x2 average pooling)
        pool2 = tf.nn.avg_pool(bias1, [1, 2, 2, 1], [1, 2, 2, 1], padding='VALID')
        # C3 input 12 x 12 -> 8 x 8
        conv3 = tf.nn.conv2d(pool2, layer2_weights, [1, 1, 1, 1], padding='VALID')
        bias3 = tf.nn.relu(conv3 + layer2_biases)
        # S4 input 8 x 8 -> 4 x 4
        pool4 = tf.nn.avg_pool(bias3, [1, 2, 2, 1], [1, 2, 2, 1], padding='VALID')
        # F5 input 4 x 4: flatten and project to num_hidden.
        shape = pool4.get_shape().as_list()
        reshape = tf.reshape(pool4, [shape[0], shape[1] * shape[2] * shape[3]])
        hidden5 = tf.nn.relu(tf.matmul(reshape, layer3_weights) + layer3_biases)
        drop5 = tf.nn.dropout(hidden5, keep_prob)
        # F6 — BUG FIX: the original fed hidden5 (pre-dropout) into this layer,
        # so drop5 was computed but never used and the first dropout had no
        # effect. Feed the dropped-out activations instead.
        hidden6 = tf.nn.relu(tf.matmul(drop5, layer4_weights) + layer4_biases)
        drop6 = tf.nn.dropout(hidden6, keep_prob)
        return tf.matmul(drop6, layer5_weights) + layer5_biases

    # Training computation.
    logits = model(tf_train_dataset, drop_out)
    loss = tf.reduce_mean(
        tf.nn.softmax_cross_entropy_with_logits(logits, tf_train_labels))

    # Optimizer: SGD with exponential learning-rate decay
    # (start at 0.05, multiply by 0.85 every 1000 steps, staircase).
    learning_rate = tf.train.exponential_decay(
        0.05, global_step, 1000, 0.85, staircase=True)
    optimizer = tf.train.GradientDescentOptimizer(learning_rate).minimize(
        loss, global_step=global_step)

    # Predictions for the training, validation, and test data.
    # keep_prob=1.0 so evaluation runs without dropout.
    train_prediction = tf.nn.softmax(logits)
    valid_prediction = tf.nn.softmax(model(tf_valid_dataset, 1.0))
    test_prediction = tf.nn.softmax(model(tf_test_dataset, 1.0))

num_steps = 5001

with tf.Session(graph=graph) as session:
    # NOTE(review): initialize_all_variables is the deprecated alias of
    # global_variables_initializer; kept for compatibility with old TF.
    tf.initialize_all_variables().run()
    print('Initialized')
    for step in range(num_steps):
        # Stride through the training set one minibatch at a time, wrapping
        # around so every step has a full batch.
        offset = (step * batch_size) % (train_labels.shape[0] - batch_size)
        batch_data = train_dataset[offset:(offset + batch_size), :, :, :]
        batch_labels = train_labels[offset:(offset + batch_size), :]
        feed_dict = {tf_train_dataset: batch_data, tf_train_labels: batch_labels}
        _, l, predictions = session.run(
            [optimizer, loss, train_prediction], feed_dict=feed_dict)
        if step % 50 == 0:
            print('Minibatch loss at step %d: %f' % (step, l))
            print('Minibatch accuracy: %.1f%%' % accuracy(predictions, batch_labels))
            print('Validation accuracy: %.1f%%' % accuracy(
                valid_prediction.eval(), valid_labels))
    print('Test accuracy: %.1f%%' % accuracy(test_prediction.eval(), test_labels))
Initialized Minibatch loss at step 0: 2.466877 Minibatch accuracy: 25.0% Validation accuracy: 10.3% Minibatch loss at step 50: 2.161342 Minibatch accuracy: 18.8% Validation accuracy: 26.2% Minibatch loss at step 100: 1.929306 Minibatch accuracy: 37.5% Validation accuracy: 44.5% Minibatch loss at step 150: 1.402269 Minibatch accuracy: 43.8% Validation accuracy: 56.8% Minibatch loss at step 200: 1.576180 Minibatch accuracy: 43.8% Validation accuracy: 59.8% Minibatch loss at step 250: 1.603382 Minibatch accuracy: 56.2% Validation accuracy: 67.5% Minibatch loss at step 300: 0.996797 Minibatch accuracy: 75.0% Validation accuracy: 74.0% Minibatch loss at step 350: 0.924196 Minibatch accuracy: 81.2% Validation accuracy: 72.1% Minibatch loss at step 400: 0.585693 Minibatch accuracy: 75.0% Validation accuracy: 72.5% Minibatch loss at step 450: 1.226259 Minibatch accuracy: 75.0% Validation accuracy: 74.3% Minibatch loss at step 500: 1.268786 Minibatch accuracy: 75.0% Validation accuracy: 75.3% Minibatch loss at step 550: 0.794506 Minibatch accuracy: 75.0% Validation accuracy: 76.7% Minibatch loss at step 600: 0.704979 Minibatch accuracy: 81.2% Validation accuracy: 77.4% Minibatch loss at step 650: 0.899304 Minibatch accuracy: 81.2% Validation accuracy: 78.2% Minibatch loss at step 700: 1.260285 Minibatch accuracy: 68.8% Validation accuracy: 79.6% Minibatch loss at step 750: 0.351961 Minibatch accuracy: 100.0% Validation accuracy: 79.3% Minibatch loss at step 800: 0.666271 Minibatch accuracy: 75.0% Validation accuracy: 79.1% Minibatch loss at step 850: 0.991982 Minibatch accuracy: 68.8% Validation accuracy: 78.4% Minibatch loss at step 900: 0.785811 Minibatch accuracy: 81.2% Validation accuracy: 79.7% Minibatch loss at step 950: 0.733023 Minibatch accuracy: 68.8% Validation accuracy: 80.3% Minibatch loss at step 1000: 0.335110 Minibatch accuracy: 87.5% Validation accuracy: 80.0% Minibatch loss at step 1050: 0.680891 Minibatch accuracy: 81.2% Validation accuracy: 80.1% 
Minibatch loss at step 1100: 0.795097 Minibatch accuracy: 68.8% Validation accuracy: 81.0% Minibatch loss at step 1150: 0.398647 Minibatch accuracy: 87.5% Validation accuracy: 80.9% Minibatch loss at step 1200: 1.061081 Minibatch accuracy: 62.5% Validation accuracy: 81.2% Minibatch loss at step 1250: 0.686575 Minibatch accuracy: 75.0% Validation accuracy: 81.3% Minibatch loss at step 1300: 0.320834 Minibatch accuracy: 93.8% Validation accuracy: 81.2% Minibatch loss at step 1350: 1.318262 Minibatch accuracy: 56.2% Validation accuracy: 80.8% Minibatch loss at step 1400: 0.357428 Minibatch accuracy: 87.5% Validation accuracy: 81.5% Minibatch loss at step 1450: 0.332784 Minibatch accuracy: 87.5% Validation accuracy: 82.0% Minibatch loss at step 1500: 0.748170 Minibatch accuracy: 81.2% Validation accuracy: 81.8% Minibatch loss at step 1550: 0.876032 Minibatch accuracy: 68.8% Validation accuracy: 81.9% Minibatch loss at step 1600: 1.237206 Minibatch accuracy: 68.8% Validation accuracy: 81.4% Minibatch loss at step 1650: 0.727465 Minibatch accuracy: 81.2% Validation accuracy: 81.5% Minibatch loss at step 1700: 0.889423 Minibatch accuracy: 81.2% Validation accuracy: 81.2% Minibatch loss at step 1750: 0.521735 Minibatch accuracy: 87.5% Validation accuracy: 82.3% Minibatch loss at step 1800: 0.735978 Minibatch accuracy: 81.2% Validation accuracy: 82.1% Minibatch loss at step 1850: 1.355421 Minibatch accuracy: 62.5% Validation accuracy: 81.9% Minibatch loss at step 1900: 0.470354 Minibatch accuracy: 93.8% Validation accuracy: 81.5% Minibatch loss at step 1950: 1.045202 Minibatch accuracy: 56.2% Validation accuracy: 83.1% Minibatch loss at step 2000: 0.212505 Minibatch accuracy: 93.8% Validation accuracy: 82.2% Minibatch loss at step 2050: 0.946967 Minibatch accuracy: 68.8% Validation accuracy: 83.1% Minibatch loss at step 2100: 0.289381 Minibatch accuracy: 93.8% Validation accuracy: 82.5% Minibatch loss at step 2150: 0.503749 Minibatch accuracy: 87.5% Validation accuracy: 
83.1% Minibatch loss at step 2200: 0.464120 Minibatch accuracy: 81.2% Validation accuracy: 83.2% Minibatch loss at step 2250: 0.684509 Minibatch accuracy: 87.5% Validation accuracy: 83.7% Minibatch loss at step 2300: 0.723519 Minibatch accuracy: 87.5% Validation accuracy: 82.7% Minibatch loss at step 2350: 0.435728 Minibatch accuracy: 81.2% Validation accuracy: 83.4% Minibatch loss at step 2400: 0.848848 Minibatch accuracy: 75.0% Validation accuracy: 83.5% Minibatch loss at step 2450: 0.610091 Minibatch accuracy: 81.2% Validation accuracy: 84.2% Minibatch loss at step 2500: 0.973621 Minibatch accuracy: 75.0% Validation accuracy: 83.9% Minibatch loss at step 2550: 0.680162 Minibatch accuracy: 81.2% Validation accuracy: 83.7% Minibatch loss at step 2600: 0.277876 Minibatch accuracy: 93.8% Validation accuracy: 84.0% Minibatch loss at step 2650: 0.717264 Minibatch accuracy: 68.8% Validation accuracy: 83.9% Minibatch loss at step 2700: 0.608532 Minibatch accuracy: 87.5% Validation accuracy: 83.9% Minibatch loss at step 2750: 1.166722 Minibatch accuracy: 75.0% Validation accuracy: 83.5% Minibatch loss at step 2800: 0.666192 Minibatch accuracy: 75.0% Validation accuracy: 84.4% Minibatch loss at step 2850: 0.112893 Minibatch accuracy: 93.8% Validation accuracy: 83.5% Minibatch loss at step 2900: 0.545045 Minibatch accuracy: 81.2% Validation accuracy: 83.9% Minibatch loss at step 2950: 0.628278 Minibatch accuracy: 87.5% Validation accuracy: 84.4% Minibatch loss at step 3000: 0.785475 Minibatch accuracy: 87.5% Validation accuracy: 83.9% Minibatch loss at step 3050: 0.930228 Minibatch accuracy: 87.5% Validation accuracy: 84.3% Minibatch loss at step 3100: 0.791880 Minibatch accuracy: 75.0% Validation accuracy: 84.4% Minibatch loss at step 3150: 0.641446 Minibatch accuracy: 75.0% Validation accuracy: 84.5% Minibatch loss at step 3200: 0.614943 Minibatch accuracy: 75.0% Validation accuracy: 84.0% Minibatch loss at step 3250: 0.768515 Minibatch accuracy: 81.2% Validation 
accuracy: 83.3% Minibatch loss at step 3300: 0.164835 Minibatch accuracy: 93.8% Validation accuracy: 84.5% Minibatch loss at step 3350: 0.500897 Minibatch accuracy: 87.5% Validation accuracy: 84.5% Minibatch loss at step 3400: 1.111665 Minibatch accuracy: 75.0% Validation accuracy: 84.7% Minibatch loss at step 3450: 0.670814 Minibatch accuracy: 75.0% Validation accuracy: 84.2% Minibatch loss at step 3500: 0.433042 Minibatch accuracy: 87.5% Validation accuracy: 84.6% Minibatch loss at step 3550: 0.315746 Minibatch accuracy: 93.8% Validation accuracy: 84.5% Minibatch loss at step 3600: 0.355311 Minibatch accuracy: 81.2% Validation accuracy: 84.4% Minibatch loss at step 3650: 0.756499 Minibatch accuracy: 68.8% Validation accuracy: 84.8% Minibatch loss at step 3700: 1.074692 Minibatch accuracy: 75.0% Validation accuracy: 84.5% Minibatch loss at step 3750: 0.792253 Minibatch accuracy: 75.0% Validation accuracy: 85.0% Minibatch loss at step 3800: 0.049834 Minibatch accuracy: 100.0% Validation accuracy: 84.9% Minibatch loss at step 3850: 0.917343 Minibatch accuracy: 81.2% Validation accuracy: 85.2% Minibatch loss at step 3900: 0.633758 Minibatch accuracy: 87.5% Validation accuracy: 84.8% Minibatch loss at step 3950: 0.091348 Minibatch accuracy: 93.8% Validation accuracy: 84.9% Minibatch loss at step 4000: 0.386830 Minibatch accuracy: 93.8% Validation accuracy: 84.8% Minibatch loss at step 4050: 0.664441 Minibatch accuracy: 87.5% Validation accuracy: 84.9% Minibatch loss at step 4100: 0.818731 Minibatch accuracy: 81.2% Validation accuracy: 85.3% Minibatch loss at step 4150: 1.098946 Minibatch accuracy: 68.8% Validation accuracy: 85.2% Minibatch loss at step 4200: 0.427599 Minibatch accuracy: 93.8% Validation accuracy: 85.0% Minibatch loss at step 4250: 0.701067 Minibatch accuracy: 81.2% Validation accuracy: 85.1% Minibatch loss at step 4300: 0.915477 Minibatch accuracy: 75.0% Validation accuracy: 85.0% Minibatch loss at step 4350: 0.301903 Minibatch accuracy: 87.5% 
Validation accuracy: 85.5% Minibatch loss at step 4400: 1.344068 Minibatch accuracy: 68.8% Validation accuracy: 85.0% Minibatch loss at step 4450: 0.401401 Minibatch accuracy: 87.5% Validation accuracy: 85.4% Minibatch loss at step 4500: 0.523591 Minibatch accuracy: 87.5% Validation accuracy: 85.3% Minibatch loss at step 4550: 0.539713 Minibatch accuracy: 81.2% Validation accuracy: 85.3% Minibatch loss at step 4600: 0.340253 Minibatch accuracy: 87.5% Validation accuracy: 85.7% Minibatch loss at step 4650: 1.693758 Minibatch accuracy: 81.2% Validation accuracy: 85.5% Minibatch loss at step 4700: 0.515037 Minibatch accuracy: 81.2% Validation accuracy: 85.4% Minibatch loss at step 4750: 1.176289 Minibatch accuracy: 62.5% Validation accuracy: 85.6% Minibatch loss at step 4800: 0.575342 Minibatch accuracy: 81.2% Validation accuracy: 85.6% Minibatch loss at step 4850: 0.411686 Minibatch accuracy: 87.5% Validation accuracy: 85.7% Minibatch loss at step 4900: 0.197935 Minibatch accuracy: 93.8% Validation accuracy: 85.7% Minibatch loss at step 4950: 0.303288 Minibatch accuracy: 87.5% Validation accuracy: 85.8% Minibatch loss at step 5000: 1.693749 Minibatch accuracy: 68.8% Validation accuracy: 85.5% Test accuracy: 91.9%
Well, the accuracy is worse. This net has many meta parameters and I don't feel comfortable tuning them randomly. I should probably change the depth and make it different between the layers, since it looks like an increasing number of feature maps is a key design item.