import numpy as np
import pandas as pd
import seaborn as sns
sns.set_theme()
df = pd.DataFrame({'x': np.arange(-6, 6.01, 0.01)})
df
| | x |
|---|---|
| 0 | -6.00 |
| 1 | -5.99 |
| 2 | -5.98 |
| 3 | -5.97 |
| 4 | -5.96 |
| ... | ... |
| 1196 | 5.96 |
| 1197 | 5.97 |
| 1198 | 5.98 |
| 1199 | 5.99 |
| 1200 | 6.00 |

1201 rows × 1 columns
def linear(x):
    return x
df['linear'] = linear(df['x'])
df.plot(x='x', y='linear')
<Axes: xlabel='x'>
def binary(x):
    if x < 0:
        return 0
    else:
        return 1
df['binary'] = df['x'].apply(binary)
df.plot(x='x', y='binary')
<Axes: xlabel='x'>
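The step function's derivative is zero everywhere away from the jump (and undefined at the jump itself), which is why it cannot be trained with gradient descent. A quick finite-difference check on the sampled grid (the column names come from the cells above):

step_diff = df['binary'].diff() / df['x'].diff()    # numerical derivative on the grid
print((step_diff.fillna(0) != 0).sum())             # expected: 1 (only at the jump near x = 0)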
def sigmoid(x):
    return 1 / (1 + np.exp(-x))
df['sigmoid'] = df['x'].apply(sigmoid)
df.plot(x='x', y='sigmoid')
<Axes: xlabel='x'>
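Since np.exp broadcasts over a pandas Series, the .apply call is not strictly needed here; a quick check that the vectorized call matches the column computed above:

vectorized = sigmoid(df['x'])                 # element-wise via NumPy broadcasting
assert np.allclose(vectorized, df['sigmoid'])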
df['sigmoid_grad'] = df['sigmoid'] * (1 - df['sigmoid'])
df.plot(x='x', y=['sigmoid', 'sigmoid_grad'])
<Axes: xlabel='x'>
def sigmoid_grad(x):
    return sigmoid(x) * (1 - sigmoid(x))
index = np.arange(-20, 21, 1)
pd.Series(data=sigmoid_grad(index), index=index).plot(logy=True, ylim=(1E-10, 1), xlabel='x', ylabel='sigmoid_grad')
<Axes: xlabel='x', ylabel='sigmoid_grad'>
sigmoid_grad(index)
array([2.06115361e-09, 5.60279637e-09, 1.52299793e-08, 4.13993738e-08, 1.12535149e-07, 3.05902133e-07, 8.31527336e-07, 2.26031919e-06, 6.14413685e-06, 1.67011429e-05, 4.53958077e-05, 1.23379350e-04, 3.35237671e-04, 9.10221180e-04, 2.46650929e-03, 6.64805667e-03, 1.76627062e-02, 4.51766597e-02, 1.04993585e-01, 1.96611933e-01, 2.50000000e-01, 1.96611933e-01, 1.04993585e-01, 4.51766597e-02, 1.76627062e-02, 6.64805667e-03, 2.46650929e-03, 9.10221180e-04, 3.35237671e-04, 1.23379350e-04, 4.53958077e-05, 1.67011429e-05, 6.14413685e-06, 2.26031919e-06, 8.31527336e-07, 3.05902133e-07, 1.12535149e-07, 4.13993739e-08, 1.52299793e-08, 5.60279642e-09, 2.06115369e-09])
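sigmoid_grad peaks at 0.25 (at x = 0) and decays roughly exponentially in the tails, so stacking many sigmoid layers multiplies backpropagated gradients by a factor of at most 0.25 per layer. A rough upper-bound illustration (the layer counts are just assumed examples):

max_grad = sigmoid_grad(np.array([0.0]))[0]   # 0.25, the best possible factor per layer
for depth in (1, 5, 10, 20):
    print(depth, max_grad ** depth)           # upper bound on the chained gradient factor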
import tensorflow as tf
def my_show(df, tf_func):
    name = tf_func.__name__.split('.')[-1]
    grad_name = name + '_grad'
    x = tf.constant(df['x'])
    with tf.GradientTape() as tape:
        tape.watch(x)  # constants are not watched by default
        y = tf_func(x)
    dy_dx = tape.gradient(y, x)
    df[name] = y.numpy()
    df[grad_name] = dy_dx.numpy()
    df.plot(x='x', y=[name, grad_name])
my_show(df, tf.keras.activations.tanh)
x = tf.constant(df['x'])
with tf.GradientTape() as tape:
    tape.watch(x)
    y = tf.keras.activations.tanh(x)
dy_dx = tape.gradient(y, x)
df['tanh'] = y.numpy()
df['tanh_grad'] = dy_dx.numpy()
df.plot(x='x', y=['tanh', 'tanh_grad'])
<Axes: xlabel='x'>
my_show(df, tf.keras.activations.relu)
x = tf.constant([0.0])
with tf.GradientTape() as tape:
    tape.watch(x)
    y = tf.keras.activations.relu(x)
dy_dx = tape.gradient(y, x)
print(y.numpy(), dy_dx.numpy())
[0.] [0.]
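ReLU is not differentiable at x = 0; the [0.] above shows TensorFlow uses 0 as the (sub)gradient there. Evaluating just left and right of zero makes the convention visible (the 1e-6 offsets are arbitrary):

x = tf.constant([-1e-6, 0.0, 1e-6])
with tf.GradientTape() as tape:
    tape.watch(x)
    y = tf.keras.activations.relu(x)
print(tape.gradient(y, x).numpy())   # expected: [0. 0. 1.]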
def leaky_relu(x):
    return tf.keras.activations.relu(x, alpha=0.01)
my_show(df, leaky_relu)
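The wrapper above should be equivalent to TensorFlow's dedicated op tf.nn.leaky_relu with the same slope; a quick sanity check on a few points:

x = tf.constant([-2.0, -0.5, 0.0, 3.0])
print(leaky_relu(x).numpy())
print(tf.nn.leaky_relu(x, alpha=0.01).numpy())   # should match the wrapper above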
my_show(df, tf.keras.activations.elu)
my_show(df, tf.keras.activations.selu)
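SELU is ELU rescaled with fixed constants (alpha ≈ 1.67326, scale ≈ 1.05070) chosen so activations self-normalize across layers; a sketch reconstructing it from elu for comparison (the constants are quoted from the SELU paper, not computed in this notebook):

def selu_manual(x, alpha=1.6732632423543772, scale=1.0507009873554805):
    # scale * x for x > 0, scale * alpha * (exp(x) - 1) for x <= 0
    return scale * tf.keras.activations.elu(x, alpha=alpha)

x = tf.constant([-3.0, -1.0, 0.0, 2.0])
print(selu_manual(x).numpy())
print(tf.keras.activations.selu(x).numpy())   # should agree to float precision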
from scipy.stats import norm
x = np.linspace(norm.ppf(0.01), norm.ppf(0.99), 100)
pd.DataFrame({'X': x, 'gauss_pdf': norm.pdf(x), 'gauss_cdf': norm.cdf(x)}).plot(x='X')
<Axes: xlabel='X'>
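GELU weights its input by the Gaussian CDF plotted above, gelu(x) = x * Φ(x); a quick check of that identity against the library's exact (non-approximate) form:

x_check = np.linspace(-4.0, 4.0, 9)
manual_gelu = x_check * norm.cdf(x_check)                         # x * Phi(x)
tf_gelu = tf.keras.activations.gelu(tf.constant(x_check)).numpy()
print(np.allclose(manual_gelu, tf_gelu, atol=1e-6))               # expected: True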
my_show(df, tf.keras.activations.gelu)
from scipy.stats import logistic
x = np.linspace(logistic.ppf(0.01), logistic.ppf(0.99), 100)
pd.DataFrame({'X': x, 'logistic_pdf': logistic.pdf(x), 'logistic_cdf': logistic.cdf(x), 'gauss_pdf': norm.pdf(x), 'gauss_cdf': norm.cdf(x)}).plot(x='X')
<Axes: xlabel='X'>
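Swish (also called SiLU) is the logistic-CDF analogue of GELU: swish(x) = x * sigmoid(x). A quick check against the NumPy sigmoid defined earlier:

x_check = np.linspace(-4.0, 4.0, 9)
manual_swish = x_check * sigmoid(x_check)                          # x * sigma(x)
tf_swish = tf.keras.activations.swish(tf.constant(x_check)).numpy()
print(np.allclose(manual_swish, tf_swish, atol=1e-6))              # expected: True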
my_show(df, tf.keras.activations.swish)
df[300:850].plot(x='x', y=['binary', 'sigmoid', 'tanh', 'relu'])
<Axes: xlabel='x'>
df[400:750].plot(x='x', y=['relu', 'elu', 'selu', 'gelu', 'swish'])
<Axes: xlabel='x'>