# Load libraries
import torch
import torch.nn as nn
from res.plot_lib import set_default, show_scatterplot, plot_bases
from matplotlib.pyplot import plot, title, axis, figure, gca, gcf
from numpy import clip
# Set style (needs to be in a new cell)
%matplotlib inline
set_default()
torch.manual_seed(0)
device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")
# generate some points in 2-D space
n_points = 1_000
X = torch.randn(n_points, 2).to(device)
# colors [0 – 511]^2
x_min = -1.5 #X.min(0)[0] #+ 1
x_max = +1.5 #X.max(0)[0] #- 1
colors = (X - x_min) / (x_max - x_min)
colors = (colors * 511).short().cpu().numpy()
colors = clip(colors, 0, 511)
figure().add_axes([0, 0, 1, 1])
show_scatterplot(X, colors, title='X')
OI = torch.cat((torch.zeros(2, 2), torch.eye(2))).to(device)
plot_bases(OI)
$$ W = U \left[ {\begin{array}{cc} s_1 & 0 \\ 0 & s_2 \\ \end{array} } \right] V^\top $$
show_scatterplot(X, colors, title='X')
plot_bases(OI)
for i in range(10):
    figure()
    # create a random matrix
    W = torch.randn(2, 2).to(device)
    # transform points
    Y = X @ W.t()
    # compute singular values
    U, S, V = torch.svd(W)
    # plot transformed points
    show_scatterplot(Y, colors, title='y = Wx, singular values: [{:.3f}, {:.3f}]'.format(S[0], S[1]))
    # transform the basis with the same map as the points
    new_OI = OI @ W.t()
    # plot old and new basis
    plot_bases(OI)
    # plot_bases(new_OI)
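The titles above report the singular values of each random $W$. As a quick added check (not part of the original notebook), the last $W$ from the loop can be rebuilt from its factors, confirming the decomposition $W = U \, \mathrm{diag}(s_1, s_2) \, V^\top$ shown earlier.

# Added check: torch.svd returns U, S, V with W = U @ diag(S) @ V.t(),
# so the last random W from the loop is recovered from its factors
assert torch.allclose(U @ torch.diag(S) @ V.t(), W, atol=1e-6)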
model = nn.Sequential(
    nn.Linear(2, 2, bias=False)
)
model.to(device)
with torch.no_grad():
    Y = model(X)
figure()
show_scatterplot(Y, colors)
plot_bases(model(OI))
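Because the layer has no bias, this model is exactly a linear map. The added sanity check below (not in the original notebook) confirms that the module's output equals the explicit product of the input with the transpose of its weight matrix.

# Added sanity check: nn.Linear(2, 2, bias=False) applied to row vectors
# is the same as multiplying by the transpose of its weight matrix
with torch.no_grad():
    W = model[0].weight  # shape (2, 2)
    assert torch.allclose(model(X), X @ W.t(), atol=1e-6)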
$$ f(x) = \tanh \left( \left[ {\begin{array}{cc} s & 0 \\ 0 & s \\ \end{array} } \right] x \right) $$
z = torch.linspace(-10, 10, 101)
s = torch.tanh(z)
plot(z.numpy(), s.numpy())
title('tanh() non-linearity');
show_scatterplot(X, colors, title='X')
plot_bases(OI)
model = nn.Sequential(
    nn.Linear(2, 2, bias=False),
    nn.Tanh()
)
model.to(device)

for s in range(1, 6):
    figure()
    W = s * torch.eye(2)
    model[0].weight.data.copy_(W)
    Y = model(X).data
    show_scatterplot(Y, colors, title=f'f(x), s={s}')
    plot_bases(OI, width=0.01)
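Increasing s stretches the cloud linearly, but tanh squashes every output coordinate into [-1, 1], so the points pile up against the boundary of the unit square. A small added check (not in the original notebook) makes the saturation explicit:

# Added check: whatever the scaling s, tanh keeps every coordinate inside
# [-1, 1]; for large s most points are pushed up against the square's boundary
with torch.no_grad():
    model[0].weight.copy_(5 * torch.eye(2))    # same scaling as the last plot above
    Y = model(X)
print(Y.abs().max().item())                     # never exceeds 1
print((Y.abs() > 0.99).float().mean().item())   # fraction of near-saturated coordinates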
show_scatterplot(X, colors, title='x')
n_hidden = 5
# NL = nn.ReLU() # ()^+
NL = nn.Tanh()
models = list()
for i in range(5):
    # create 1-layer neural networks with random weights
    model = nn.Sequential(
        nn.Linear(2, n_hidden),
        NL,
        nn.Linear(n_hidden, 2)
    )
    model.to(device)
    models.append(model)
    with torch.no_grad():
        Y = model(X)
    figure()
    show_scatterplot(Y, colors, title='f(x)')
    # plot_bases(OI)
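Each of these random networks is simply an affine map, a point-wise non-linearity, and another affine map. As an added illustration (assuming the Tanh non-linearity selected above), the last model from the loop can be evaluated by hand from its own parameters and compared with the module's output.

# Added illustration: with NL = nn.Tanh(), the Sequential model above computes
# f(x) = W2 · tanh(W1 · x + b1) + b2; written out with its own parameters:
with torch.no_grad():
    lin1, lin2 = model[0], model[2]
    Y_manual = torch.tanh(X @ lin1.weight.t() + lin1.bias) @ lin2.weight.t() + lin2.bias
    assert torch.allclose(Y_manual, model(X), atol=1e-6)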
# deeper network with random weights
show_scatterplot(X, colors, title='x')
n_hidden = 5
NL = nn.ReLU()
# NL = nn.Tanh()
for i in range(5):
    model = nn.Sequential(
        nn.Linear(2, n_hidden),
        NL,
        nn.Linear(n_hidden, n_hidden),
        NL,
        nn.Linear(n_hidden, n_hidden),
        NL,
        nn.Linear(n_hidden, n_hidden),
        NL,
        nn.Linear(n_hidden, 2)
    )
    model.to(device)
    with torch.no_grad():
        Y = model(X)
    figure()
    show_scatterplot(Y, colors, title='f(x)', axis=False)
show_scatterplot(X, colors, title='x')
with torch.no_grad():
    Y = models[2](X)
figure()
show_scatterplot(Y, colors, title='f(x)')
def interpolate(X_in, X_out, steps, p=1/50, plotting_grid=False, ratio='1:1'):
    N = 1000
    for t in range(steps):
        # a = (t / (steps - 1)) ** p
        a = ((p + 1)**(t / (steps - 1)) - 1) / p
        gca().cla()
        # plt.text(0, 5, action, color='w', horizontalalignment='center', verticalalignment='center')
        show_scatterplot(a * X_out + (1 - a) * X_in, colors, title='f(x)')
        if plotting_grid: plot_grid(a * X_out[N:] + (1 - a) * X_in[N:])
        gcf().canvas.draw()
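The blending coefficient follows $a = \big((p + 1)^{t/(\text{steps}-1)} - 1\big) / p$, which always runs from 0 to 1: for small p it is close to linear in t, while larger p makes it start slowly and accelerate towards the end. A short added sketch (not in the original notebook) prints the schedule for a few values of p:

# Added sketch: the schedule a = ((p + 1)**u - 1) / p with u = t / (steps - 1)
# runs from 0 to 1; small p is nearly linear, large p back-loads the motion
for p in (0.001, 1/50, 10.0):
    a = [((p + 1)**u - 1) / p for u in (0.0, 0.25, 0.5, 0.75, 1.0)]
    print(p, [round(v, 3) for v in a])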
%matplotlib widget
# fig = figure(figsize=(19.2, 10.8)) # resolution is 100 px per inch => 1920 x 1080
fig = figure(figsize=(10, 5))
ax = fig.add_axes([0, 0, 1, 1]) # stretch the plot area over the whole figure
axis('off');
# Animate input/output
steps = 150
# steps = 1500
interpolate(X, Y, steps, p=.001)
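To save the animation rather than only drawing it live, one option is matplotlib's FuncAnimation. The sketch below is an addition, not part of the original notebook; it assumes, as interpolate does, that show_scatterplot draws onto the current axes, and the file name interpolation.gif is only an example.

# Added sketch: render the same interpolation with FuncAnimation so it can be
# written to disk; assumes show_scatterplot draws on the current axes
from matplotlib import animation

fig_anim = figure(figsize=(10, 5))
fig_anim.add_axes([0, 0, 1, 1])
axis('off')

def draw_frame(t, p=.001):
    a = ((p + 1)**(t / (steps - 1)) - 1) / p
    gca().cla()
    show_scatterplot(a * Y + (1 - a) * X, colors, title='f(x)')

anim = animation.FuncAnimation(fig_anim, draw_frame, frames=steps, interval=40)
# anim.save('interpolation.gif', writer=animation.PillowWriter(fps=25))  # example file name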