Notebook
# Minimize: Gradient Descent using derivative: W -= learning_rate * derivative
learning_rate = 0.1
# Hand-written gradient of the mean-squared-error cost with respect to W.
# (The constant factor 2 from the derivative is dropped; it only rescales the step size.)
gradient = tf.reduce_mean((W * X - Y) * X)
descent = W - learning_rate * gradient
update = W.assign(descent)
# Minimize: Gradient Descent Magic
optimizer = tf.train.GradientDescentOptimizer(learning_rate=0.1)
train = optimizer.minimize(cost)
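To see either update rule in action, here is a minimal TF 1.x-style training loop. This is a sketch, not part of the original notebook: it assumes earlier cells defined X and Y as placeholders, W as a tf.Variable, and cost as the mean-squared-error tensor, and it uses hypothetical training arrays x_data and y_data. Either the hand-written update op or the optimizer's train op can be run each step.

import tensorflow as tf  # TF 1.x graph mode (in TF 2.x, use tf.compat.v1 with v2 behavior disabled)

sess = tf.Session()
sess.run(tf.global_variables_initializer())  # initialize W to its starting value

for step in range(21):
    # Swap `update` for `train` to use the built-in optimizer instead of the manual rule.
    sess.run(update, feed_dict={X: x_data, Y: y_data})
    print(step, sess.run(cost, feed_dict={X: x_data, Y: y_data}), sess.run(W))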