import warnings
warnings.filterwarnings('ignore')
from collections import Counter
from pprint import pprint
import pandas as pd
import numpy as np
import matplotlib
import matplotlib.pyplot as plt
import keras
import tensorflow as tf
from keras.layers import Activation, Dense
from tqdm import tqdm
import watermark
%load_ext watermark
%matplotlib inline
We start by printing out the versions of the libraries we're using, for future reference:
%watermark -n -v -m -g -iv
Python implementation: CPython
Python version       : 3.11.5
IPython version      : 8.12.3

Compiler    : Clang 14.0.6
OS          : Darwin
Release     : 23.3.0
Machine     : arm64
Processor   : arm
CPU cores   : 16
Architecture: 64bit

Git hash: 3022f3d8aeb2c24ebf2d47d4e9999181a7ad777d

json      : 2.0.9
matplotlib: 3.7.2
pandas    : 2.0.3
tensorflow: 2.12.0
watermark : 2.4.3
numpy     : 1.23.5
keras     : 2.12.0
Load default figure style
plt.style.use('d4sci.mplstyle')
colors = plt.rcParams['axes.prop_cycle'].by_key()['color']
Next we set up the range of input values that we'll use for visualization purposes
z = np.linspace(-6, 6, 100)
def gradient(values, z):
    """Numerically approximate the derivative using forward finite differences."""
    delta = z[1] - z[0]        # uniform grid spacing
    diffs = np.diff(values)    # successive differences, one fewer element than z
    return diffs / delta
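Before using the helper, it's worth sanity-checking it against a derivative we know in closed form. The sketch below is our addition (the quadratic test function and variable names are not from the original notebook): for f(z) = z², the forward difference (f(z+d) − f(z))/d equals 2z + d, which is exactly the derivative 2z evaluated at the midpoint of each grid interval.
# Sanity check (our addition): forward differences of z**2 should match
# the exact derivative 2z evaluated at the midpoints of the grid.
test_grad = gradient(z ** 2, z)
z_mid = 0.5 * (z[:-1] + z[1:])
assert np.allclose(test_grad, 2 * z_mid)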
linear = keras.layers.Activation('linear')
values = linear(z).numpy()
grad = gradient(values, z)
plt.plot(z, values, '-')
plt.plot(z[1:], grad, '-')
plt.xlabel('z')
plt.title('Linear activation function')
plt.legend(['function', 'gradient'])
<matplotlib.legend.Legend at 0x157e28650>
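The finite-difference helper is handy for plotting, but TensorFlow can also produce exact derivatives through automatic differentiation. As a minimal sketch (our addition, assuming eager execution and the linear layer defined above), tf.GradientTape confirms that the identity activation has gradient 1 everywhere:
# Exact gradient via autodiff (our addition): watch the input, apply the
# activation, and differentiate. For the identity the gradient is 1.
z_t = tf.constant(z)
with tf.GradientTape() as tape:
    tape.watch(z_t)    # constants aren't tracked unless explicitly watched
    y = linear(z_t)
exact = tape.gradient(y, z_t).numpy()
assert np.allclose(exact, 1.0)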
sigmoid = keras.layers.Activation('sigmoid')
values = sigmoid(z).numpy()
grad = gradient(values, z)
plt.plot(z, values, '-')
plt.plot(z[1:], grad, '-')
plt.legend(['function', 'gradient'])
plt.xlabel('z')
plt.title('Sigmoid activation function')
Text(0.5, 1.0, 'Sigmoid activation function')
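The sigmoid derivative also has a well-known closed form, σ'(z) = σ(z)(1 − σ(z)), which peaks at just 0.25 at the origin and vanishes in both tails; this saturation is the root of the vanishing-gradient problem in deep sigmoid networks. A quick check of our numerical gradient against the identity (our addition, evaluated at the grid midpoints where the forward difference is most accurate):
# Check (our addition): numerical gradient vs. the analytic identity
# σ'(z) = σ(z) * (1 - σ(z)), evaluated at the grid midpoints.
z_mid = 0.5 * (z[:-1] + z[1:])
sig_mid = 1.0 / (1.0 + np.exp(-z_mid))
assert np.allclose(grad, sig_mid * (1.0 - sig_mid), atol=1e-2)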
relu = keras.layers.Activation('relu')
values = relu(z).numpy()
grad = gradient(values, z)
plt.plot(z, values, '-')
plt.plot(z[1:], grad, '-')
plt.legend(['function', 'gradient'])
plt.xlabel('z')
plt.title('Rectified Linear activation function')
Text(0.5, 1.0, 'Rectified Linear activation function')
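For ReLU the gradient is a step function: 0 for z < 0 and 1 for z > 0, with an undefined derivative at the kink itself (frameworks commonly use 0 there by convention). The check below is our addition; it skips the single grid interval that straddles the kink:
# Check (our addition): ReLU's gradient is 0 for z < 0 and 1 for z > 0.
# We mask out the one interval containing the kink at the origin.
z_mid = 0.5 * (z[:-1] + z[1:])
away_from_kink = np.abs(z_mid) > 0.1
step = (z_mid > 0).astype(float)
assert np.allclose(grad[away_from_kink], step[away_from_kink], atol=1e-4)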
tanh = keras.layers.Activation('tanh')
values = tanh(z).numpy()
grad = gradient(values, z)
plt.plot(z, values, '-')
plt.plot(z[1:], grad, '-')
plt.legend(['function', 'gradient'])
plt.xlabel('z')
plt.title('Hyperbolic Tangent activation function')
Text(0.5, 1.0, 'Hyperbolic Tangent activation function')
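Like the sigmoid, tanh obeys a simple identity, tanh'(z) = 1 − tanh²(z); its slope peaks at 1 at the origin, four times the sigmoid's maximum, but it still saturates in the tails. One last check (our addition), again at the grid midpoints:
# Check (our addition): numerical gradient vs. the analytic identity
# tanh'(z) = 1 - tanh(z)**2, evaluated at the grid midpoints.
z_mid = 0.5 * (z[:-1] + z[1:])
assert np.allclose(grad, 1.0 - np.tanh(z_mid) ** 2, atol=1e-2)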