Deep Learning Models -- A collection of various deep learning architectures, models, and tips for TensorFlow and PyTorch in Jupyter Notebooks.
%load_ext watermark
%watermark -a 'Sebastian Raschka' -v -p torch
Sebastian Raschka CPython 3.6.8 IPython 7.2.0 torch 1.0.0
A convolutional autoencoder that uses deconvolutional (transposed convolution) layers in the decoder and compresses 784-pixel (28x28) MNIST images down to a 7x7x8 (392-pixel) representation. Instead of pooling operations, it downsamples by using a stride of 2 in the convolutional layers.
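As a quick sanity check (a minimal sketch, not part of the original notebook), the PyTorch output-size formula o = floor((w + 2p - k)/s) + 1 shows how a stride-2 convolution roughly halves the spatial dimensions:

import torch

x = torch.randn(1, 1, 28, 28)  # dummy batch with one MNIST-sized image
conv = torch.nn.Conv2d(in_channels=1, out_channels=4,
                       kernel_size=3, stride=2, padding=0)
# floor((28 + 0 - 3)/2) + 1 = 13
print(conv(x).shape)  # torch.Size([1, 4, 13, 13])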
import time
import numpy as np
import torch
import torch.nn.functional as F
from torch.utils.data import DataLoader
from torchvision import datasets
from torchvision import transforms
if torch.cuda.is_available():
    torch.backends.cudnn.deterministic = True
##########################
### SETTINGS
##########################
# Device
device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")
print('Device:', device)
# Hyperparameters
random_seed = 456
learning_rate = 0.005
num_epochs = 10
batch_size = 128
##########################
### MNIST DATASET
##########################
# Note transforms.ToTensor() scales input images
# to 0-1 range
train_dataset = datasets.MNIST(root='data',
                               train=True,
                               transform=transforms.ToTensor(),
                               download=True)

test_dataset = datasets.MNIST(root='data',
                              train=False,
                              transform=transforms.ToTensor())

train_loader = DataLoader(dataset=train_dataset,
                          batch_size=batch_size,
                          shuffle=True)

test_loader = DataLoader(dataset=test_dataset,
                         batch_size=batch_size,
                         shuffle=False)
# Checking the dataset
for images, labels in train_loader:
    print('Image batch dimensions:', images.shape)
    print('Image label dimensions:', labels.shape)
    break
Device: cuda:0
Image batch dimensions: torch.Size([128, 1, 28, 28])
Image label dimensions: torch.Size([128])
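As the comment above notes, transforms.ToTensor() scales pixel intensities to the 0-1 range, which is what makes binary cross-entropy a valid reconstruction loss later on. A quick check of the value range (a minimal sketch, not part of the original notebook):

print('Pixel value range: %.1f to %.1f'
      % (images.min().item(), images.max().item()))
# expected: 0.0 to 1.0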
##########################
### MODEL
##########################
class ConvolutionalAutoencoder(torch.nn.Module):

    def __init__(self):
        super(ConvolutionalAutoencoder, self).__init__()

        # calculate same padding:
        # (w - k + 2*p)/s + 1 = o
        # => p = (s(o-1) - w + k)/2

        ### ENCODER
        # 28x28x1 => 13x13x4
        self.conv_1 = torch.nn.Conv2d(in_channels=1,
                                      out_channels=4,
                                      kernel_size=(3, 3),
                                      stride=(2, 2),
                                      # floor((2(14-1) - 28 + 3) / 2) = 0;
                                      # with p=0 the actual output size is
                                      # floor((28 - 3)/2) + 1 = 13
                                      padding=0)
        # 13x13x4 => 7x7x8
        self.conv_2 = torch.nn.Conv2d(in_channels=4,
                                      out_channels=8,
                                      kernel_size=(3, 3),
                                      stride=(2, 2),
                                      # (2(7-1) - 13 + 3) / 2 = 1
                                      padding=1)

        ### DECODER
        # 7x7x8 => 15x15x4
        self.deconv_1 = torch.nn.ConvTranspose2d(in_channels=8,
                                                 out_channels=4,
                                                 kernel_size=(3, 3),
                                                 stride=(2, 2),
                                                 padding=0)
        # 15x15x4 => 29x29x1
        self.deconv_2 = torch.nn.ConvTranspose2d(in_channels=4,
                                                 out_channels=1,
                                                 kernel_size=(3, 3),
                                                 stride=(2, 2),
                                                 padding=1)

    def forward(self, x):

        ### ENCODER
        x = self.conv_1(x)
        x = F.leaky_relu(x)
        x = self.conv_2(x)
        x = F.leaky_relu(x)

        ### DECODER
        x = self.deconv_1(x)
        x = F.leaky_relu(x)
        x = self.deconv_2(x)
        x = F.leaky_relu(x)
        # crop the 29x29 decoder output back to the 28x28 input size
        x = x[:, :, :-1, :-1]
        x = torch.sigmoid(x)
        return x
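To verify the layer-size comments above, the intermediate shapes can be traced with a dummy input (a minimal sketch, not part of the original notebook):

m = ConvolutionalAutoencoder()
x = torch.randn(1, 1, 28, 28)        # dummy MNIST-sized input
print(m.conv_1(x).shape)             # torch.Size([1, 4, 13, 13])
print(m.conv_2(m.conv_1(x)).shape)   # torch.Size([1, 8, 7, 7])
print(m(x).shape)                    # torch.Size([1, 1, 28, 28])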
torch.manual_seed(random_seed)
model = ConvolutionalAutoencoder()
model = model.to(device)
optimizer = torch.optim.Adam(model.parameters(), lr=learning_rate)
start_time = time.time()
for epoch in range(num_epochs):
    for batch_idx, (features, targets) in enumerate(train_loader):

        # don't need labels, only the images (features)
        features = features.to(device)

        ### FORWARD AND BACK PROP
        decoded = model(features)
        cost = F.binary_cross_entropy(decoded, features)
        optimizer.zero_grad()
        cost.backward()

        ### UPDATE MODEL PARAMETERS
        optimizer.step()

        ### LOGGING
        if not batch_idx % 50:
            print('Epoch: %03d/%03d | Batch %03d/%03d | Cost: %.4f'
                  % (epoch+1, num_epochs, batch_idx,
                     len(train_loader), cost))

    print('Time elapsed: %.2f min' % ((time.time() - start_time)/60))

print('Total Training Time: %.2f min' % ((time.time() - start_time)/60))
Epoch: 001/010 | Batch 000/469 | Cost: 0.7184
Epoch: 001/010 | Batch 050/469 | Cost: 0.6903
Epoch: 001/010 | Batch 100/469 | Cost: 0.6569
Epoch: 001/010 | Batch 150/469 | Cost: 0.5834
Epoch: 001/010 | Batch 200/469 | Cost: 0.2208
Epoch: 001/010 | Batch 250/469 | Cost: 0.1800
Epoch: 001/010 | Batch 300/469 | Cost: 0.1723
Epoch: 001/010 | Batch 350/469 | Cost: 0.1573
Epoch: 001/010 | Batch 400/469 | Cost: 0.1450
Epoch: 001/010 | Batch 450/469 | Cost: 0.1451
Time elapsed: 0.12 min
Epoch: 002/010 | Batch 000/469 | Cost: 0.1384
Epoch: 002/010 | Batch 050/469 | Cost: 0.1396
Epoch: 002/010 | Batch 100/469 | Cost: 0.1325
Epoch: 002/010 | Batch 150/469 | Cost: 0.1306
Epoch: 002/010 | Batch 200/469 | Cost: 0.1284
Epoch: 002/010 | Batch 250/469 | Cost: 0.1256
Epoch: 002/010 | Batch 300/469 | Cost: 0.1235
Epoch: 002/010 | Batch 350/469 | Cost: 0.1256
Epoch: 002/010 | Batch 400/469 | Cost: 0.1251
Epoch: 002/010 | Batch 450/469 | Cost: 0.1195
Time elapsed: 0.24 min
Epoch: 003/010 | Batch 000/469 | Cost: 0.1189
Epoch: 003/010 | Batch 050/469 | Cost: 0.1209
Epoch: 003/010 | Batch 100/469 | Cost: 0.1197
Epoch: 003/010 | Batch 150/469 | Cost: 0.1147
Epoch: 003/010 | Batch 200/469 | Cost: 0.1152
Epoch: 003/010 | Batch 250/469 | Cost: 0.1142
Epoch: 003/010 | Batch 300/469 | Cost: 0.1146
Epoch: 003/010 | Batch 350/469 | Cost: 0.1182
Epoch: 003/010 | Batch 400/469 | Cost: 0.1150
Epoch: 003/010 | Batch 450/469 | Cost: 0.1099
Time elapsed: 0.36 min
Epoch: 004/010 | Batch 000/469 | Cost: 0.1165
Epoch: 004/010 | Batch 050/469 | Cost: 0.1159
Epoch: 004/010 | Batch 100/469 | Cost: 0.1092
Epoch: 004/010 | Batch 150/469 | Cost: 0.1112
Epoch: 004/010 | Batch 200/469 | Cost: 0.1147
Epoch: 004/010 | Batch 250/469 | Cost: 0.1148
Epoch: 004/010 | Batch 300/469 | Cost: 0.1136
Epoch: 004/010 | Batch 350/469 | Cost: 0.1141
Epoch: 004/010 | Batch 400/469 | Cost: 0.1098
Epoch: 004/010 | Batch 450/469 | Cost: 0.1160
Time elapsed: 0.48 min
Epoch: 005/010 | Batch 000/469 | Cost: 0.1103
Epoch: 005/010 | Batch 050/469 | Cost: 0.1097
Epoch: 005/010 | Batch 100/469 | Cost: 0.1102
Epoch: 005/010 | Batch 150/469 | Cost: 0.1116
Epoch: 005/010 | Batch 200/469 | Cost: 0.1120
Epoch: 005/010 | Batch 250/469 | Cost: 0.1110
Epoch: 005/010 | Batch 300/469 | Cost: 0.1137
Epoch: 005/010 | Batch 350/469 | Cost: 0.1110
Epoch: 005/010 | Batch 400/469 | Cost: 0.1087
Epoch: 005/010 | Batch 450/469 | Cost: 0.1150
Time elapsed: 0.60 min
Epoch: 006/010 | Batch 000/469 | Cost: 0.1111
Epoch: 006/010 | Batch 050/469 | Cost: 0.1038
Epoch: 006/010 | Batch 100/469 | Cost: 0.1088
Epoch: 006/010 | Batch 150/469 | Cost: 0.1112
Epoch: 006/010 | Batch 200/469 | Cost: 0.1080
Epoch: 006/010 | Batch 250/469 | Cost: 0.1091
Epoch: 006/010 | Batch 300/469 | Cost: 0.1073
Epoch: 006/010 | Batch 350/469 | Cost: 0.1089
Epoch: 006/010 | Batch 400/469 | Cost: 0.1058
Epoch: 006/010 | Batch 450/469 | Cost: 0.1121
Time elapsed: 0.71 min
Epoch: 007/010 | Batch 000/469 | Cost: 0.1064
Epoch: 007/010 | Batch 050/469 | Cost: 0.1049
Epoch: 007/010 | Batch 100/469 | Cost: 0.1067
Epoch: 007/010 | Batch 150/469 | Cost: 0.1061
Epoch: 007/010 | Batch 200/469 | Cost: 0.1078
Epoch: 007/010 | Batch 250/469 | Cost: 0.1010
Epoch: 007/010 | Batch 300/469 | Cost: 0.1028
Epoch: 007/010 | Batch 350/469 | Cost: 0.1076
Epoch: 007/010 | Batch 400/469 | Cost: 0.1061
Epoch: 007/010 | Batch 450/469 | Cost: 0.1049
Time elapsed: 0.83 min
Epoch: 008/010 | Batch 000/469 | Cost: 0.1087
Epoch: 008/010 | Batch 050/469 | Cost: 0.1069
Epoch: 008/010 | Batch 100/469 | Cost: 0.1079
Epoch: 008/010 | Batch 150/469 | Cost: 0.1028
Epoch: 008/010 | Batch 200/469 | Cost: 0.1044
Epoch: 008/010 | Batch 250/469 | Cost: 0.1024
Epoch: 008/010 | Batch 300/469 | Cost: 0.1015
Epoch: 008/010 | Batch 350/469 | Cost: 0.1013
Epoch: 008/010 | Batch 400/469 | Cost: 0.1052
Epoch: 008/010 | Batch 450/469 | Cost: 0.1024
Time elapsed: 0.95 min
Epoch: 009/010 | Batch 000/469 | Cost: 0.1059
Epoch: 009/010 | Batch 050/469 | Cost: 0.1008
Epoch: 009/010 | Batch 100/469 | Cost: 0.1059
Epoch: 009/010 | Batch 150/469 | Cost: 0.1046
Epoch: 009/010 | Batch 200/469 | Cost: 0.1038
Epoch: 009/010 | Batch 250/469 | Cost: 0.1039
Epoch: 009/010 | Batch 300/469 | Cost: 0.0989
Epoch: 009/010 | Batch 350/469 | Cost: 0.1043
Epoch: 009/010 | Batch 400/469 | Cost: 0.1048
Epoch: 009/010 | Batch 450/469 | Cost: 0.1047
Time elapsed: 1.06 min
Epoch: 010/010 | Batch 000/469 | Cost: 0.1054
Epoch: 010/010 | Batch 050/469 | Cost: 0.1027
Epoch: 010/010 | Batch 100/469 | Cost: 0.1027
Epoch: 010/010 | Batch 150/469 | Cost: 0.1005
Epoch: 010/010 | Batch 200/469 | Cost: 0.1035
Epoch: 010/010 | Batch 250/469 | Cost: 0.1006
Epoch: 010/010 | Batch 300/469 | Cost: 0.1039
Epoch: 010/010 | Batch 350/469 | Cost: 0.0974
Epoch: 010/010 | Batch 400/469 | Cost: 0.1034
Epoch: 010/010 | Batch 450/469 | Cost: 0.1026
Time elapsed: 1.18 min
Total Training Time: 1.18 min
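After training, it is useful to check the reconstruction loss on held-out data as well. A minimal sketch (not part of the original notebook) that averages the per-batch binary cross-entropy over the test set:

model.eval()
with torch.no_grad():
    test_cost, num_batches = 0., 0
    for features, _ in test_loader:
        features = features.to(device)
        decoded = model(features)
        test_cost += F.binary_cross_entropy(decoded, features).item()
        num_batches += 1
print('Average test reconstruction cost: %.4f' % (test_cost / num_batches))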
%matplotlib inline
import matplotlib.pyplot as plt
##########################
### VISUALIZATION
##########################
n_images = 15
image_width = 28
fig, axes = plt.subplots(nrows=2, ncols=n_images,
                         sharex=True, sharey=True, figsize=(20, 2.5))
orig_images = features[:n_images]
decoded_images = decoded[:n_images]

for i in range(n_images):
    for ax, img in zip(axes, [orig_images, decoded_images]):
        curr_img = img[i].detach().to(torch.device('cpu'))
        ax[i].imshow(curr_img.view((image_width, image_width)), cmap='binary')
%watermark -iv
numpy 1.15.4 torch 1.0.0