#!/usr/bin/env python
# coding: utf-8

# # CS 20 : TensorFlow for Deep Learning Research
# ## Lecture 03 : Linear and Logistic Regression
# ### Linear Regression with tf.data
# Same contents as [Lec03_Linear Regression with tf.data.ipynb](https://nbviewer.jupyter.org/github/aisolab/CS20/blob/master/Lec03_Linear%20and%20Logistic%20Regression/Lec03_Linear%20Regression%20with%20tf.data.ipynb), but written in a different style.
#
# * Creating the input pipeline with `tf.data`
# * Using `eager execution`
#
# **Reference**
#
# * https://jhui.github.io/2017/11/21/TensorFlow-Importing-data/
# * https://towardsdatascience.com/how-to-use-dataset-in-tensorflow-c758ef9e4428
# * https://stackoverflow.com/questions/47356764/how-to-use-tensorflow-dataset-api-with-training-and-validation-sets

# ### Setup

# In[1]:

from __future__ import absolute_import, division, print_function
import os, sys
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
import tensorflow as tf
from pprint import pprint
get_ipython().run_line_magic('matplotlib', 'inline')

print(tf.__version__)
tf.enable_eager_execution()

# ### Build input pipeline

# In[2]:

train_dir = os.listdir('../data/lecture03/example_with_data/train_dir/')
train_dir = list(map(lambda path: '../data/lecture03/example_with_data/train_dir/' + path, train_dir))
pprint(train_dir, compact=True)

# In[3]:

val_dir = '../data/lecture03/example_with_data/val_dir/birth_life_2010_val.txt'
pprint(val_dir)

# In[4]:

# hyper-parameters
epochs = 100
batch_size = 8

# In[5]:

# dataset construction
# each record is a tab-separated line: country, birth rate, life expectancy;
# `[1:]` drops the country column, keeping only the two numeric features

# for the training dataset
tr_dataset = tf.data.TextLineDataset(filenames=train_dir)
tr_dataset = tr_dataset.map(lambda record: tf.decode_csv(records=record,
                                                         record_defaults=[[''], [.0], [.0]],
                                                         field_delim='\t')[1:])
tr_dataset = tr_dataset.shuffle(200)
tr_dataset = tr_dataset.batch(batch_size=batch_size)

# for the validation dataset
val_dataset = tf.data.TextLineDataset(filenames=val_dir)
val_dataset = val_dataset.map(lambda record: tf.decode_csv(records=record,
                                                           record_defaults=[[''], [.0], [.0]],
                                                           field_delim='\t')[1:])
val_dataset = val_dataset.batch(batch_size=batch_size)

# ### Define the graph of Simple Linear Regression

# In[6]:

# create weight and bias, initialized to 0
w = tf.Variable(initial_value=tf.constant(.0), name='weight')
b = tf.Variable(initial_value=tf.constant(.0), name='bias')

# construct the model to predict y
def model(x):
    yhat = x * w + b
    return yhat

# use the squared error as the loss function
def loss_fn(model, x, y):
    loss = tf.reduce_mean(tf.square(y - model(x)))
    return loss

# use gradient descent with a learning rate of 0.01 to minimize the loss
opt = tf.train.GradientDescentOptimizer(learning_rate=.01)

# ### Training

# In[7]:

# create a summary writer for TensorBoard
logdir = '../graphs/lecture03/linreg_mse_with_tf_data_de/'
summary_writer = tf.contrib.summary.create_file_writer(logdir=logdir)

# In[8]:

global_step = tf.train.get_or_create_global_step()

tr_loss_hist = []
val_loss_hist = []

for epoch in range(epochs):
    avg_tr_loss = 0
    avg_val_loss = 0
    tr_step = 0
    val_step = 0

    with summary_writer.as_default(), tf.contrib.summary.always_record_summaries():  # for TensorBoard
        # training
        for x_mb, y_mb in tr_dataset:
            with tf.GradientTape() as tape:
                tr_loss = loss_fn(model=model, x=x_mb, y=y_mb)
            tf.contrib.summary.scalar(name='tr_loss', tensor=tr_loss)
            avg_tr_loss += tr_loss
            tr_step += 1
            grads = tape.gradient(target=tr_loss, sources=[w, b])
            # pass global_step so the logged summaries advance instead of staying at step 0
            opt.apply_gradients(grads_and_vars=zip(grads, [w, b]), global_step=global_step)
        avg_tr_loss /= tr_step
        tr_loss_hist.append(avg_tr_loss)

        # validation
        for x_mb, y_mb in val_dataset:
            val_loss = loss_fn(model=model, x=x_mb, y=y_mb)
            tf.contrib.summary.scalar(name='val_loss', tensor=val_loss)
            avg_val_loss += val_loss
            val_step += 1
        avg_val_loss /= val_step
        val_loss_hist.append(avg_val_loss)

    if (epoch + 1) % 10 == 0:
        print('epoch : {:3}, tr_loss : {:.3f}, val_loss : {:.3f}'.format(epoch + 1, avg_tr_loss, avg_val_loss))
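# A quick sanity check (not part of the original lecture): compare the parameters
# learned by SGD against the closed-form ordinary-least-squares fit computed with
# numpy. This assumes the combined data file used in the visualization cell below;
# since training only saw the train split and runs a finite number of SGD steps,
# the two fits should be close but not identical.

# In[ ]:

data_all = pd.read_table('../data/lecture03/example_with_placeholder/birth_life_2010.txt')
x_all = data_all.iloc[:, 1].values  # birth rate
y_all = data_all.iloc[:, 2].values  # life expectancy
w_ols, b_ols = np.polyfit(x_all, y_all, deg=1)  # slope and intercept of y = w * x + b
print('learned     : w = {:.3f}, b = {:.3f}'.format(w.numpy(), b.numpy()))
print('closed-form : w = {:.3f}, b = {:.3f}'.format(w_ols, b_ols))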
# ### Visualization

# In[9]:

plt.plot(tr_loss_hist, label='train')
plt.plot(val_loss_hist, label='validation')
plt.legend()

# In[10]:

data = pd.read_table('../data/lecture03/example_with_placeholder/birth_life_2010.txt')  # load the data for visualization
plt.plot(data.iloc[:, 1], data.iloc[:, 2], 'bo', label='Real data')
# convert the learned parameters to numpy scalars when plotting the fitted line
plt.plot(data.iloc[:, 1], data.iloc[:, 1] * w.numpy() + b.numpy(), 'r', label='Predicted data')
plt.legend()
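# Two small usage notes (not in the original notebook). First, a minimal sketch of
# querying the trained model for a new input; `25.0` is an arbitrary example birth rate.

# In[ ]:

print(model(tf.constant(25.0)).numpy())

# Second, the `tr_loss` / `val_loss` scalars logged during training can be viewed by
# pointing TensorBoard at the log directory from a shell:
#
#   tensorboard --logdir ../graphs/lecture03/linreg_mse_with_tf_data_de/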