Deep Learning Models -- A collection of various deep learning architectures, models, and tips for TensorFlow and PyTorch in Jupyter Notebooks.
%load_ext watermark
%watermark -a 'Sebastian Raschka' -v -p torch
Sebastian Raschka CPython 3.6.8 IPython 7.2.0 torch 1.0.1.post2
Label Smoothing: Replace the real-image labels (1's) by 0.9, based on the idea in Salimans et al. (2016), "Improved Techniques for Training GANs".
import time
import numpy as np
import torch
import torch.nn.functional as F
from torchvision import datasets
from torchvision import transforms
import torch.nn as nn
from torch.utils.data import DataLoader
# Make cuDNN deterministic so GPU runs are reproducible.
if torch.cuda.is_available():
    torch.backends.cudnn.deterministic = True

##########################
### SETTINGS
##########################

# Device: prefer the fourth GPU when CUDA is present, otherwise run on CPU.
device = torch.device("cuda:3" if torch.cuda.is_available() else "cpu")

# Hyperparameters
random_seed = 123
generator_learning_rate = 0.0001
discriminator_learning_rate = 0.0001
num_epochs = 100
BATCH_SIZE = 128
LATENT_DIM = 100
IMG_SHAPE = (1, 28, 28)

# Flattened image size: the product of all IMG_SHAPE dimensions (1*28*28 = 784).
IMG_SIZE = 1
for dim_size in IMG_SHAPE:
    IMG_SIZE *= dim_size
##########################
### MNIST DATASET
##########################

# transforms.ToTensor() already rescales pixel values into the [0, 1] range;
# it is stateless, so one instance can be shared by both datasets.
to_tensor = transforms.ToTensor()

train_dataset = datasets.MNIST(root='data', train=True,
                               transform=to_tensor, download=True)

test_dataset = datasets.MNIST(root='data', train=False,
                              transform=to_tensor)

train_loader = DataLoader(dataset=train_dataset, batch_size=BATCH_SIZE,
                          num_workers=4, shuffle=True)

test_loader = DataLoader(dataset=test_dataset, batch_size=BATCH_SIZE,
                         num_workers=4, shuffle=False)

# Sanity check: print the dimensions of a single minibatch.
for images, labels in train_loader:
    print('Image batch dimensions:', images.shape)
    print('Image label dimensions:', labels.shape)
    break
Image batch dimensions: torch.Size([128, 1, 28, 28]) Image label dimensions: torch.Size([128])
##########################
### MODEL
##########################
class Flatten(nn.Module):
    """Collapse every dimension after the batch dimension into one."""

    def forward(self, input):
        batch_size = input.size(0)
        return input.view(batch_size, -1)
class Reshape1(nn.Module):
    """Reshape a flat (batch, 3136) tensor into (batch, 64, 7, 7) feature maps."""

    def forward(self, input):
        batch_size = input.size(0)
        return input.view(batch_size, 64, 7, 7)
class GAN(torch.nn.Module):
    """Generator/discriminator pair for 28x28 MNIST images.

    The generator maps a latent vector of size LATENT_DIM to a 1x28x28 image
    in [-1, 1] (Tanh output).  The discriminator maps an image to a single
    raw logit per example -- there is deliberately no final Sigmoid, because
    training uses F.binary_cross_entropy_with_logits.
    """

    def __init__(self):
        super(GAN, self).__init__()

        self.generator = nn.Sequential(
            # Project the latent vector to 64*7*7 = 3136 features ...
            nn.Linear(LATENT_DIM, 3136, bias=False),
            nn.BatchNorm1d(num_features=3136),
            nn.LeakyReLU(inplace=True, negative_slope=0.0001),
            # ... and reshape to (batch, 64, 7, 7) feature maps.
            Reshape1(),
            # Spatial sizes per ConvTranspose2d: 7 -> 13 -> 25 -> 27 -> 28.
            nn.ConvTranspose2d(in_channels=64, out_channels=32,
                               kernel_size=(3, 3), stride=(2, 2),
                               padding=1, bias=False),
            nn.BatchNorm2d(num_features=32),
            nn.LeakyReLU(inplace=True, negative_slope=0.0001),
            nn.ConvTranspose2d(in_channels=32, out_channels=16,
                               kernel_size=(3, 3), stride=(2, 2),
                               padding=1, bias=False),
            nn.BatchNorm2d(num_features=16),
            nn.LeakyReLU(inplace=True, negative_slope=0.0001),
            nn.ConvTranspose2d(in_channels=16, out_channels=8,
                               kernel_size=(3, 3), stride=(1, 1),
                               padding=0, bias=False),
            nn.BatchNorm2d(num_features=8),
            nn.LeakyReLU(inplace=True, negative_slope=0.0001),
            nn.ConvTranspose2d(in_channels=8, out_channels=1,
                               kernel_size=(2, 2), stride=(1, 1),
                               padding=0, bias=False),
            # Tanh squashes outputs into [-1, 1] to match the normalized
            # real images fed to the discriminator.
            nn.Tanh()
        )

        self.discriminator = nn.Sequential(
            # Strided convolutions downsample: 28x28 -> 14x14 -> 7x7.
            nn.Conv2d(in_channels=1, out_channels=8, padding=1,
                      kernel_size=(3, 3), stride=(2, 2), bias=False),
            nn.BatchNorm2d(num_features=8),
            nn.LeakyReLU(inplace=True, negative_slope=0.0001),
            nn.Conv2d(in_channels=8, out_channels=32, padding=1,
                      kernel_size=(3, 3), stride=(2, 2), bias=False),
            nn.BatchNorm2d(num_features=32),
            nn.LeakyReLU(inplace=True, negative_slope=0.0001),
            Flatten(),
            # One output logit per image; no Sigmoid on purpose (the loss
            # function applies it internally, which is numerically stabler).
            nn.Linear(7*7*32, 1),
        )

    def generator_forward(self, z):
        """Map latent vectors ``z`` of shape (batch, LATENT_DIM) to images."""
        img = self.generator(z)
        return img

    def discriminator_forward(self, img):
        """Return one real/fake logit per image, as a flat 1-D tensor.

        Bug fix: the original body called ``model.discriminator(img)``,
        silently depending on the module-level ``model`` global.  That breaks
        as soon as a second GAN instance exists or ``model`` is renamed;
        ``self.discriminator`` is the correct reference.
        """
        pred = self.discriminator(img)
        return pred.view(-1)
# Seed the RNG before constructing the model so weight init is reproducible.
torch.manual_seed(random_seed)

model = GAN().to(device)
print(model)
GAN( (generator): Sequential( (0): Linear(in_features=100, out_features=3136, bias=False) (1): BatchNorm1d(3136, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True) (2): LeakyReLU(negative_slope=0.0001, inplace) (3): Reshape1() (4): ConvTranspose2d(64, 32, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1), bias=False) (5): BatchNorm2d(32, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True) (6): LeakyReLU(negative_slope=0.0001, inplace) (7): ConvTranspose2d(32, 16, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1), bias=False) (8): BatchNorm2d(16, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True) (9): LeakyReLU(negative_slope=0.0001, inplace) (10): ConvTranspose2d(16, 8, kernel_size=(3, 3), stride=(1, 1), bias=False) (11): BatchNorm2d(8, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True) (12): LeakyReLU(negative_slope=0.0001, inplace) (13): ConvTranspose2d(8, 1, kernel_size=(2, 2), stride=(1, 1), bias=False) (14): Tanh() ) (discriminator): Sequential( (0): Conv2d(1, 8, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1), bias=False) (1): BatchNorm2d(8, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True) (2): LeakyReLU(negative_slope=0.0001, inplace) (3): Conv2d(8, 32, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1), bias=False) (4): BatchNorm2d(32, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True) (5): LeakyReLU(negative_slope=0.0001, inplace) (6): Flatten() (7): Linear(in_features=1568, out_features=1, bias=True) ) )
### ## FOR DEBUGGING
# Dead debugging code preserved as a bare string literal (a no-op at runtime).
# If pasted back in as code, it would register forward hooks on the
# generator's ConvTranspose2d layers so their intermediate outputs could be
# collected into `outputs` for shape inspection.
"""
outputs = []
def hook(module, input, output):
outputs.append(output)
#for i, layer in enumerate(model.discriminator):
# if isinstance(layer, torch.nn.modules.conv.Conv2d):
# model.discriminator[i].register_forward_hook(hook)
for i, layer in enumerate(model.generator):
if isinstance(layer, torch.nn.modules.ConvTranspose2d):
model.generator[i].register_forward_hook(hook)
"""
'\noutputs = []\ndef hook(module, input, output):\n outputs.append(output)\n\n#for i, layer in enumerate(model.discriminator):\n# if isinstance(layer, torch.nn.modules.conv.Conv2d):\n# model.discriminator[i].register_forward_hook(hook)\n\nfor i, layer in enumerate(model.generator):\n if isinstance(layer, torch.nn.modules.ConvTranspose2d):\n model.generator[i].register_forward_hook(hook)\n'
# Separate Adam optimizers so each network only updates its own parameters.
optim_gener = torch.optim.Adam(model.generator.parameters(), lr=generator_learning_rate)
optim_discr = torch.optim.Adam(model.discriminator.parameters(), lr=discriminator_learning_rate)

start_time = time.time()

# Per-minibatch loss histories (useful for plotting after training).
discr_costs = []
gener_costs = []

for epoch in range(num_epochs):
    model = model.train()
    for batch_idx, (features, targets) in enumerate(train_loader):

        # Normalize images from [0, 1] to [-1, 1] to match the generator's
        # Tanh output range.
        features = (features - 0.5)*2.
        features = features.view(-1, IMG_SIZE).to(device)
        targets = targets.to(device)

        # Target labels: 1 for real, 0 for fake.
        valid = torch.ones(targets.size(0)).float().to(device)
        fake = torch.zeros(targets.size(0)).float().to(device)

        ### FORWARD AND BACK PROP

        # --------------------------
        # Train Generator
        # --------------------------

        # Make new images from latent noise drawn uniformly from [-1, 1].
        z = torch.zeros((targets.size(0), LATENT_DIM)).uniform_(-1.0, 1.0).to(device)
        generated_features = model.generator_forward(z)

        # Loss for fooling the discriminator: the generator wants its fakes
        # scored as real.  Note the smoothed label 0.9 is used here as well
        # as for the discriminator's real labels below.
        discr_pred = model.discriminator_forward(generated_features.view(targets.size(0), 1, 28, 28))

        gener_loss = F.binary_cross_entropy_with_logits(discr_pred, valid*0.9)

        optim_gener.zero_grad()
        gener_loss.backward()
        optim_gener.step()

        # --------------------------
        # Train Discriminator
        # --------------------------

        # Real images against smoothed "real" labels (0.9 label smoothing).
        discr_pred_real = model.discriminator_forward(features.view(targets.size(0), 1, 28, 28))
        real_loss = F.binary_cross_entropy_with_logits(discr_pred_real, valid*0.9)

        # Fake images are detached so this update does not backpropagate
        # through (or modify gradients of) the generator.
        discr_pred_fake = model.discriminator_forward(generated_features.view(targets.size(0), 1, 28, 28).detach())
        fake_loss = F.binary_cross_entropy_with_logits(discr_pred_fake, fake)

        # Average of the real and fake losses.
        discr_loss = 0.5*(real_loss + fake_loss)

        optim_discr.zero_grad()
        discr_loss.backward()
        optim_discr.step()

        discr_costs.append(discr_loss.item())
        gener_costs.append(gener_loss.item())

        ### LOGGING
        # Report losses every 100 minibatches.
        if not batch_idx % 100:
            print ('Epoch: %03d/%03d | Batch %03d/%03d | Gen/Dis Loss: %.4f/%.4f'
                   %(epoch+1, num_epochs, batch_idx,
                     len(train_loader), gener_loss, discr_loss))

    print('Time elapsed: %.2f min' % ((time.time() - start_time)/60))

print('Total Training Time: %.2f min' % ((time.time() - start_time)/60))
Epoch: 001/100 | Batch 000/469 | Gen/Dis Loss: 0.7199/0.7011 Epoch: 001/100 | Batch 100/469 | Gen/Dis Loss: 0.8279/0.5914 Epoch: 001/100 | Batch 200/469 | Gen/Dis Loss: 0.9247/0.5693 Epoch: 001/100 | Batch 300/469 | Gen/Dis Loss: 0.9432/0.5711 Epoch: 001/100 | Batch 400/469 | Gen/Dis Loss: 0.9611/0.5634 Time elapsed: 0.12 min Epoch: 002/100 | Batch 000/469 | Gen/Dis Loss: 0.9903/0.5403 Epoch: 002/100 | Batch 100/469 | Gen/Dis Loss: 0.9956/0.5292 Epoch: 002/100 | Batch 200/469 | Gen/Dis Loss: 0.9466/0.5793 Epoch: 002/100 | Batch 300/469 | Gen/Dis Loss: 0.9046/0.5975 Epoch: 002/100 | Batch 400/469 | Gen/Dis Loss: 0.8873/0.5808 Time elapsed: 0.23 min Epoch: 003/100 | Batch 000/469 | Gen/Dis Loss: 0.8739/0.6011 Epoch: 003/100 | Batch 100/469 | Gen/Dis Loss: 0.8570/0.6170 Epoch: 003/100 | Batch 200/469 | Gen/Dis Loss: 0.8971/0.6244 Epoch: 003/100 | Batch 300/469 | Gen/Dis Loss: 0.9126/0.6041 Epoch: 003/100 | Batch 400/469 | Gen/Dis Loss: 0.8840/0.6154 Time elapsed: 0.34 min Epoch: 004/100 | Batch 000/469 | Gen/Dis Loss: 0.8995/0.6134 Epoch: 004/100 | Batch 100/469 | Gen/Dis Loss: 0.8972/0.6153 Epoch: 004/100 | Batch 200/469 | Gen/Dis Loss: 0.9177/0.5945 Epoch: 004/100 | Batch 300/469 | Gen/Dis Loss: 0.8854/0.6109 Epoch: 004/100 | Batch 400/469 | Gen/Dis Loss: 0.8943/0.5872 Time elapsed: 0.45 min Epoch: 005/100 | Batch 000/469 | Gen/Dis Loss: 0.9017/0.5988 Epoch: 005/100 | Batch 100/469 | Gen/Dis Loss: 0.8936/0.6066 Epoch: 005/100 | Batch 200/469 | Gen/Dis Loss: 0.9543/0.5749 Epoch: 005/100 | Batch 300/469 | Gen/Dis Loss: 0.9124/0.6088 Epoch: 005/100 | Batch 400/469 | Gen/Dis Loss: 0.9296/0.5717 Time elapsed: 0.57 min Epoch: 006/100 | Batch 000/469 | Gen/Dis Loss: 0.9465/0.5663 Epoch: 006/100 | Batch 100/469 | Gen/Dis Loss: 0.9577/0.5539 Epoch: 006/100 | Batch 200/469 | Gen/Dis Loss: 0.9717/0.5505 Epoch: 006/100 | Batch 300/469 | Gen/Dis Loss: 0.9644/0.5885 Epoch: 006/100 | Batch 400/469 | Gen/Dis Loss: 0.9957/0.5362 Time elapsed: 0.68 min Epoch: 007/100 | Batch 000/469 
| Gen/Dis Loss: 0.9872/0.5589 Epoch: 007/100 | Batch 100/469 | Gen/Dis Loss: 0.9676/0.5416 Epoch: 007/100 | Batch 200/469 | Gen/Dis Loss: 1.0073/0.5408 Epoch: 007/100 | Batch 300/469 | Gen/Dis Loss: 0.9919/0.5506 Epoch: 007/100 | Batch 400/469 | Gen/Dis Loss: 1.0746/0.5052 Time elapsed: 0.79 min Epoch: 008/100 | Batch 000/469 | Gen/Dis Loss: 0.9764/0.5329 Epoch: 008/100 | Batch 100/469 | Gen/Dis Loss: 1.0014/0.5399 Epoch: 008/100 | Batch 200/469 | Gen/Dis Loss: 0.9805/0.5498 Epoch: 008/100 | Batch 300/469 | Gen/Dis Loss: 0.9289/0.5516 Epoch: 008/100 | Batch 400/469 | Gen/Dis Loss: 0.9759/0.5605 Time elapsed: 0.90 min Epoch: 009/100 | Batch 000/469 | Gen/Dis Loss: 1.0388/0.5366 Epoch: 009/100 | Batch 100/469 | Gen/Dis Loss: 0.9734/0.5642 Epoch: 009/100 | Batch 200/469 | Gen/Dis Loss: 0.9647/0.5679 Epoch: 009/100 | Batch 300/469 | Gen/Dis Loss: 1.0362/0.5243 Epoch: 009/100 | Batch 400/469 | Gen/Dis Loss: 0.9979/0.5692 Time elapsed: 1.02 min Epoch: 010/100 | Batch 000/469 | Gen/Dis Loss: 0.9470/0.5844 Epoch: 010/100 | Batch 100/469 | Gen/Dis Loss: 0.9332/0.5765 Epoch: 010/100 | Batch 200/469 | Gen/Dis Loss: 1.0060/0.5536 Epoch: 010/100 | Batch 300/469 | Gen/Dis Loss: 0.9568/0.5769 Epoch: 010/100 | Batch 400/469 | Gen/Dis Loss: 0.9779/0.5648 Time elapsed: 1.13 min Epoch: 011/100 | Batch 000/469 | Gen/Dis Loss: 0.9031/0.5689 Epoch: 011/100 | Batch 100/469 | Gen/Dis Loss: 0.9431/0.6049 Epoch: 011/100 | Batch 200/469 | Gen/Dis Loss: 0.9411/0.5894 Epoch: 011/100 | Batch 300/469 | Gen/Dis Loss: 0.9091/0.5965 Epoch: 011/100 | Batch 400/469 | Gen/Dis Loss: 1.0204/0.5738 Time elapsed: 1.24 min Epoch: 012/100 | Batch 000/469 | Gen/Dis Loss: 0.9692/0.6196 Epoch: 012/100 | Batch 100/469 | Gen/Dis Loss: 0.9701/0.5777 Epoch: 012/100 | Batch 200/469 | Gen/Dis Loss: 0.9076/0.5774 Epoch: 012/100 | Batch 300/469 | Gen/Dis Loss: 0.9328/0.5835 Epoch: 012/100 | Batch 400/469 | Gen/Dis Loss: 0.9949/0.5762 Time elapsed: 1.35 min Epoch: 013/100 | Batch 000/469 | Gen/Dis Loss: 0.9661/0.5814 
Epoch: 013/100 | Batch 100/469 | Gen/Dis Loss: 0.9293/0.6001 Epoch: 013/100 | Batch 200/469 | Gen/Dis Loss: 0.9946/0.5462 Epoch: 013/100 | Batch 300/469 | Gen/Dis Loss: 0.9182/0.5921 Epoch: 013/100 | Batch 400/469 | Gen/Dis Loss: 0.8765/0.6099 Time elapsed: 1.46 min Epoch: 014/100 | Batch 000/469 | Gen/Dis Loss: 0.9496/0.6027 Epoch: 014/100 | Batch 100/469 | Gen/Dis Loss: 0.9531/0.5943 Epoch: 014/100 | Batch 200/469 | Gen/Dis Loss: 0.9785/0.5706 Epoch: 014/100 | Batch 300/469 | Gen/Dis Loss: 0.9208/0.6180 Epoch: 014/100 | Batch 400/469 | Gen/Dis Loss: 0.9413/0.6163 Time elapsed: 1.57 min Epoch: 015/100 | Batch 000/469 | Gen/Dis Loss: 0.9433/0.5873 Epoch: 015/100 | Batch 100/469 | Gen/Dis Loss: 0.9037/0.6233 Epoch: 015/100 | Batch 200/469 | Gen/Dis Loss: 0.9664/0.6000 Epoch: 015/100 | Batch 300/469 | Gen/Dis Loss: 0.9632/0.5882 Epoch: 015/100 | Batch 400/469 | Gen/Dis Loss: 0.9608/0.5906 Time elapsed: 1.69 min Epoch: 016/100 | Batch 000/469 | Gen/Dis Loss: 0.9694/0.6053 Epoch: 016/100 | Batch 100/469 | Gen/Dis Loss: 0.9470/0.6012 Epoch: 016/100 | Batch 200/469 | Gen/Dis Loss: 0.9078/0.6029 Epoch: 016/100 | Batch 300/469 | Gen/Dis Loss: 1.0011/0.5807 Epoch: 016/100 | Batch 400/469 | Gen/Dis Loss: 0.9154/0.5970 Time elapsed: 1.80 min Epoch: 017/100 | Batch 000/469 | Gen/Dis Loss: 0.9974/0.5824 Epoch: 017/100 | Batch 100/469 | Gen/Dis Loss: 1.0259/0.6210 Epoch: 017/100 | Batch 200/469 | Gen/Dis Loss: 0.9149/0.6286 Epoch: 017/100 | Batch 300/469 | Gen/Dis Loss: 1.0055/0.5824 Epoch: 017/100 | Batch 400/469 | Gen/Dis Loss: 0.9480/0.6367 Time elapsed: 1.91 min Epoch: 018/100 | Batch 000/469 | Gen/Dis Loss: 1.0554/0.6066 Epoch: 018/100 | Batch 100/469 | Gen/Dis Loss: 0.9175/0.6078 Epoch: 018/100 | Batch 200/469 | Gen/Dis Loss: 0.8957/0.6258 Epoch: 018/100 | Batch 300/469 | Gen/Dis Loss: 0.9580/0.6132 Epoch: 018/100 | Batch 400/469 | Gen/Dis Loss: 0.9345/0.6109 Time elapsed: 2.02 min Epoch: 019/100 | Batch 000/469 | Gen/Dis Loss: 0.9828/0.6092 Epoch: 019/100 | Batch 100/469 
| Gen/Dis Loss: 0.9119/0.6228 Epoch: 019/100 | Batch 200/469 | Gen/Dis Loss: 0.9198/0.6067 Epoch: 019/100 | Batch 300/469 | Gen/Dis Loss: 0.8892/0.6311 Epoch: 019/100 | Batch 400/469 | Gen/Dis Loss: 0.9002/0.6336 Time elapsed: 2.13 min Epoch: 020/100 | Batch 000/469 | Gen/Dis Loss: 0.8469/0.6336 Epoch: 020/100 | Batch 100/469 | Gen/Dis Loss: 0.9294/0.6057 Epoch: 020/100 | Batch 200/469 | Gen/Dis Loss: 0.8978/0.6338 Epoch: 020/100 | Batch 300/469 | Gen/Dis Loss: 0.8667/0.6431 Epoch: 020/100 | Batch 400/469 | Gen/Dis Loss: 0.9466/0.6222 Time elapsed: 2.24 min Epoch: 021/100 | Batch 000/469 | Gen/Dis Loss: 0.8880/0.6108 Epoch: 021/100 | Batch 100/469 | Gen/Dis Loss: 0.9087/0.6223 Epoch: 021/100 | Batch 200/469 | Gen/Dis Loss: 0.8987/0.6225 Epoch: 021/100 | Batch 300/469 | Gen/Dis Loss: 0.9042/0.6298 Epoch: 021/100 | Batch 400/469 | Gen/Dis Loss: 0.8999/0.6155 Time elapsed: 2.36 min Epoch: 022/100 | Batch 000/469 | Gen/Dis Loss: 0.9178/0.6201 Epoch: 022/100 | Batch 100/469 | Gen/Dis Loss: 0.9107/0.6331 Epoch: 022/100 | Batch 200/469 | Gen/Dis Loss: 0.9610/0.6201 Epoch: 022/100 | Batch 300/469 | Gen/Dis Loss: 0.8632/0.6458 Epoch: 022/100 | Batch 400/469 | Gen/Dis Loss: 0.8947/0.6223 Time elapsed: 2.47 min Epoch: 023/100 | Batch 000/469 | Gen/Dis Loss: 0.8869/0.6305 Epoch: 023/100 | Batch 100/469 | Gen/Dis Loss: 0.8689/0.6270 Epoch: 023/100 | Batch 200/469 | Gen/Dis Loss: 0.9208/0.5853 Epoch: 023/100 | Batch 300/469 | Gen/Dis Loss: 0.9038/0.6345 Epoch: 023/100 | Batch 400/469 | Gen/Dis Loss: 0.9289/0.6401 Time elapsed: 2.58 min Epoch: 024/100 | Batch 000/469 | Gen/Dis Loss: 0.8647/0.6353 Epoch: 024/100 | Batch 100/469 | Gen/Dis Loss: 0.8548/0.6260 Epoch: 024/100 | Batch 200/469 | Gen/Dis Loss: 0.9138/0.6218 Epoch: 024/100 | Batch 300/469 | Gen/Dis Loss: 0.9182/0.6371 Epoch: 024/100 | Batch 400/469 | Gen/Dis Loss: 0.8995/0.6475 Time elapsed: 2.69 min Epoch: 025/100 | Batch 000/469 | Gen/Dis Loss: 0.8810/0.6327 Epoch: 025/100 | Batch 100/469 | Gen/Dis Loss: 0.8602/0.6528 
Epoch: 025/100 | Batch 200/469 | Gen/Dis Loss: 0.9301/0.6224 Epoch: 025/100 | Batch 300/469 | Gen/Dis Loss: 0.8608/0.6412 Epoch: 025/100 | Batch 400/469 | Gen/Dis Loss: 0.8841/0.6334 Time elapsed: 2.81 min Epoch: 026/100 | Batch 000/469 | Gen/Dis Loss: 0.8725/0.6535 Epoch: 026/100 | Batch 100/469 | Gen/Dis Loss: 0.8364/0.6445 Epoch: 026/100 | Batch 200/469 | Gen/Dis Loss: 0.8932/0.6317 Epoch: 026/100 | Batch 300/469 | Gen/Dis Loss: 0.8917/0.6443 Epoch: 026/100 | Batch 400/469 | Gen/Dis Loss: 0.9525/0.6246 Time elapsed: 2.92 min Epoch: 027/100 | Batch 000/469 | Gen/Dis Loss: 0.9090/0.6156 Epoch: 027/100 | Batch 100/469 | Gen/Dis Loss: 0.8831/0.6419 Epoch: 027/100 | Batch 200/469 | Gen/Dis Loss: 0.8410/0.6749 Epoch: 027/100 | Batch 300/469 | Gen/Dis Loss: 0.8807/0.6328 Epoch: 027/100 | Batch 400/469 | Gen/Dis Loss: 0.9155/0.6622 Time elapsed: 3.03 min Epoch: 028/100 | Batch 000/469 | Gen/Dis Loss: 0.8852/0.6346 Epoch: 028/100 | Batch 100/469 | Gen/Dis Loss: 0.8874/0.6336 Epoch: 028/100 | Batch 200/469 | Gen/Dis Loss: 0.9072/0.6393 Epoch: 028/100 | Batch 300/469 | Gen/Dis Loss: 0.7987/0.6813 Epoch: 028/100 | Batch 400/469 | Gen/Dis Loss: 0.8685/0.6251 Time elapsed: 3.14 min Epoch: 029/100 | Batch 000/469 | Gen/Dis Loss: 0.9025/0.6621 Epoch: 029/100 | Batch 100/469 | Gen/Dis Loss: 0.9036/0.6609 Epoch: 029/100 | Batch 200/469 | Gen/Dis Loss: 0.8844/0.6401 Epoch: 029/100 | Batch 300/469 | Gen/Dis Loss: 0.9103/0.6305 Epoch: 029/100 | Batch 400/469 | Gen/Dis Loss: 0.8738/0.6678 Time elapsed: 3.25 min Epoch: 030/100 | Batch 000/469 | Gen/Dis Loss: 0.8770/0.6558 Epoch: 030/100 | Batch 100/469 | Gen/Dis Loss: 0.8777/0.6417 Epoch: 030/100 | Batch 200/469 | Gen/Dis Loss: 0.8661/0.6318 Epoch: 030/100 | Batch 300/469 | Gen/Dis Loss: 0.8992/0.6237 Epoch: 030/100 | Batch 400/469 | Gen/Dis Loss: 0.8660/0.6611 Time elapsed: 3.37 min Epoch: 031/100 | Batch 000/469 | Gen/Dis Loss: 0.8464/0.6461 Epoch: 031/100 | Batch 100/469 | Gen/Dis Loss: 0.8828/0.6642 Epoch: 031/100 | Batch 200/469 
| Gen/Dis Loss: 0.8679/0.6387 Epoch: 031/100 | Batch 300/469 | Gen/Dis Loss: 0.8572/0.6772 Epoch: 031/100 | Batch 400/469 | Gen/Dis Loss: 0.9429/0.6233 Time elapsed: 3.48 min Epoch: 032/100 | Batch 000/469 | Gen/Dis Loss: 0.8767/0.6489 Epoch: 032/100 | Batch 100/469 | Gen/Dis Loss: 0.8048/0.6767 Epoch: 032/100 | Batch 200/469 | Gen/Dis Loss: 0.8681/0.6351 Epoch: 032/100 | Batch 300/469 | Gen/Dis Loss: 0.8601/0.6436 Epoch: 032/100 | Batch 400/469 | Gen/Dis Loss: 0.8666/0.6630 Time elapsed: 3.59 min Epoch: 033/100 | Batch 000/469 | Gen/Dis Loss: 0.8771/0.6336 Epoch: 033/100 | Batch 100/469 | Gen/Dis Loss: 0.9111/0.6487 Epoch: 033/100 | Batch 200/469 | Gen/Dis Loss: 0.8550/0.6380 Epoch: 033/100 | Batch 300/469 | Gen/Dis Loss: 0.8190/0.6745 Epoch: 033/100 | Batch 400/469 | Gen/Dis Loss: 0.8462/0.6554 Time elapsed: 3.70 min Epoch: 034/100 | Batch 000/469 | Gen/Dis Loss: 0.9088/0.6501 Epoch: 034/100 | Batch 100/469 | Gen/Dis Loss: 0.8250/0.6630 Epoch: 034/100 | Batch 200/469 | Gen/Dis Loss: 0.8822/0.6423 Epoch: 034/100 | Batch 300/469 | Gen/Dis Loss: 0.8658/0.6655 Epoch: 034/100 | Batch 400/469 | Gen/Dis Loss: 0.8741/0.6508 Time elapsed: 3.81 min Epoch: 035/100 | Batch 000/469 | Gen/Dis Loss: 0.8069/0.6803 Epoch: 035/100 | Batch 100/469 | Gen/Dis Loss: 0.8712/0.6515 Epoch: 035/100 | Batch 200/469 | Gen/Dis Loss: 0.8320/0.6476 Epoch: 035/100 | Batch 300/469 | Gen/Dis Loss: 0.8694/0.6488 Epoch: 035/100 | Batch 400/469 | Gen/Dis Loss: 0.8796/0.6346 Time elapsed: 3.93 min Epoch: 036/100 | Batch 000/469 | Gen/Dis Loss: 0.8380/0.6704 Epoch: 036/100 | Batch 100/469 | Gen/Dis Loss: 0.8539/0.7126 Epoch: 036/100 | Batch 200/469 | Gen/Dis Loss: 0.8768/0.6607 Epoch: 036/100 | Batch 300/469 | Gen/Dis Loss: 0.8559/0.6335 Epoch: 036/100 | Batch 400/469 | Gen/Dis Loss: 0.8209/0.6547 Time elapsed: 4.04 min Epoch: 037/100 | Batch 000/469 | Gen/Dis Loss: 0.8169/0.6817 Epoch: 037/100 | Batch 100/469 | Gen/Dis Loss: 0.7988/0.6848 Epoch: 037/100 | Batch 200/469 | Gen/Dis Loss: 0.9129/0.6380 
Epoch: 037/100 | Batch 300/469 | Gen/Dis Loss: 0.8525/0.6541 Epoch: 037/100 | Batch 400/469 | Gen/Dis Loss: 0.8710/0.6376 Time elapsed: 4.15 min Epoch: 038/100 | Batch 000/469 | Gen/Dis Loss: 0.8181/0.6473 Epoch: 038/100 | Batch 100/469 | Gen/Dis Loss: 0.8506/0.6422 Epoch: 038/100 | Batch 200/469 | Gen/Dis Loss: 0.8217/0.6751 Epoch: 038/100 | Batch 300/469 | Gen/Dis Loss: 0.8572/0.6677 Epoch: 038/100 | Batch 400/469 | Gen/Dis Loss: 0.8449/0.6601 Time elapsed: 4.26 min Epoch: 039/100 | Batch 000/469 | Gen/Dis Loss: 0.8411/0.6787 Epoch: 039/100 | Batch 100/469 | Gen/Dis Loss: 0.8835/0.6549 Epoch: 039/100 | Batch 200/469 | Gen/Dis Loss: 0.8337/0.6673 Epoch: 039/100 | Batch 300/469 | Gen/Dis Loss: 0.8514/0.6984 Epoch: 039/100 | Batch 400/469 | Gen/Dis Loss: 0.8631/0.6412 Time elapsed: 4.37 min Epoch: 040/100 | Batch 000/469 | Gen/Dis Loss: 0.8176/0.6792 Epoch: 040/100 | Batch 100/469 | Gen/Dis Loss: 0.8179/0.6850 Epoch: 040/100 | Batch 200/469 | Gen/Dis Loss: 0.8335/0.6718 Epoch: 040/100 | Batch 300/469 | Gen/Dis Loss: 0.8859/0.6825 Epoch: 040/100 | Batch 400/469 | Gen/Dis Loss: 0.8693/0.6730 Time elapsed: 4.49 min Epoch: 041/100 | Batch 000/469 | Gen/Dis Loss: 0.8637/0.6343 Epoch: 041/100 | Batch 100/469 | Gen/Dis Loss: 0.8636/0.6539 Epoch: 041/100 | Batch 200/469 | Gen/Dis Loss: 0.8955/0.6711 Epoch: 041/100 | Batch 300/469 | Gen/Dis Loss: 0.8251/0.6857 Epoch: 041/100 | Batch 400/469 | Gen/Dis Loss: 0.8457/0.6643 Time elapsed: 4.60 min Epoch: 042/100 | Batch 000/469 | Gen/Dis Loss: 0.8629/0.6490 Epoch: 042/100 | Batch 100/469 | Gen/Dis Loss: 0.8323/0.6871 Epoch: 042/100 | Batch 200/469 | Gen/Dis Loss: 0.8808/0.6699 Epoch: 042/100 | Batch 300/469 | Gen/Dis Loss: 0.8435/0.6640 Epoch: 042/100 | Batch 400/469 | Gen/Dis Loss: 0.8558/0.6665 Time elapsed: 4.71 min Epoch: 043/100 | Batch 000/469 | Gen/Dis Loss: 0.8284/0.6654 Epoch: 043/100 | Batch 100/469 | Gen/Dis Loss: 0.8199/0.6679 Epoch: 043/100 | Batch 200/469 | Gen/Dis Loss: 0.8216/0.6837 Epoch: 043/100 | Batch 300/469 
| Gen/Dis Loss: 0.8753/0.6456 Epoch: 043/100 | Batch 400/469 | Gen/Dis Loss: 0.8230/0.6883 Time elapsed: 4.82 min Epoch: 044/100 | Batch 000/469 | Gen/Dis Loss: 0.8064/0.6730 Epoch: 044/100 | Batch 100/469 | Gen/Dis Loss: 0.8188/0.6722 Epoch: 044/100 | Batch 200/469 | Gen/Dis Loss: 0.8221/0.6905 Epoch: 044/100 | Batch 300/469 | Gen/Dis Loss: 0.8629/0.6760 Epoch: 044/100 | Batch 400/469 | Gen/Dis Loss: 0.8290/0.6629 Time elapsed: 4.93 min Epoch: 045/100 | Batch 000/469 | Gen/Dis Loss: 0.8116/0.7203 Epoch: 045/100 | Batch 100/469 | Gen/Dis Loss: 0.7893/0.6742 Epoch: 045/100 | Batch 200/469 | Gen/Dis Loss: 0.8578/0.6751 Epoch: 045/100 | Batch 300/469 | Gen/Dis Loss: 0.8131/0.6710 Epoch: 045/100 | Batch 400/469 | Gen/Dis Loss: 0.7831/0.6693 Time elapsed: 5.04 min Epoch: 046/100 | Batch 000/469 | Gen/Dis Loss: 0.8430/0.6424 Epoch: 046/100 | Batch 100/469 | Gen/Dis Loss: 0.7889/0.7069 Epoch: 046/100 | Batch 200/469 | Gen/Dis Loss: 0.8079/0.6661 Epoch: 046/100 | Batch 300/469 | Gen/Dis Loss: 0.7796/0.6824 Epoch: 046/100 | Batch 400/469 | Gen/Dis Loss: 0.7898/0.6977 Time elapsed: 5.16 min Epoch: 047/100 | Batch 000/469 | Gen/Dis Loss: 0.8528/0.6542 Epoch: 047/100 | Batch 100/469 | Gen/Dis Loss: 0.8487/0.6886 Epoch: 047/100 | Batch 200/469 | Gen/Dis Loss: 0.8615/0.6819 Epoch: 047/100 | Batch 300/469 | Gen/Dis Loss: 0.8190/0.6941 Epoch: 047/100 | Batch 400/469 | Gen/Dis Loss: 0.8235/0.6894 Time elapsed: 5.27 min Epoch: 048/100 | Batch 000/469 | Gen/Dis Loss: 0.8294/0.6434 Epoch: 048/100 | Batch 100/469 | Gen/Dis Loss: 0.8551/0.6369 Epoch: 048/100 | Batch 200/469 | Gen/Dis Loss: 0.8261/0.6878 Epoch: 048/100 | Batch 300/469 | Gen/Dis Loss: 0.8543/0.6716 Epoch: 048/100 | Batch 400/469 | Gen/Dis Loss: 0.8294/0.6522 Time elapsed: 5.38 min Epoch: 049/100 | Batch 000/469 | Gen/Dis Loss: 0.8247/0.6794 Epoch: 049/100 | Batch 100/469 | Gen/Dis Loss: 0.8154/0.6647 Epoch: 049/100 | Batch 200/469 | Gen/Dis Loss: 0.8296/0.6700 Epoch: 049/100 | Batch 300/469 | Gen/Dis Loss: 0.8172/0.6740 
Epoch: 049/100 | Batch 400/469 | Gen/Dis Loss: 0.7691/0.6888 Time elapsed: 5.50 min Epoch: 050/100 | Batch 000/469 | Gen/Dis Loss: 0.8136/0.6549 Epoch: 050/100 | Batch 100/469 | Gen/Dis Loss: 0.8024/0.6925 Epoch: 050/100 | Batch 200/469 | Gen/Dis Loss: 0.8419/0.6766 Epoch: 050/100 | Batch 300/469 | Gen/Dis Loss: 0.8240/0.6709 Epoch: 050/100 | Batch 400/469 | Gen/Dis Loss: 0.8292/0.7057 Time elapsed: 5.61 min Epoch: 051/100 | Batch 000/469 | Gen/Dis Loss: 0.8241/0.6891 Epoch: 051/100 | Batch 100/469 | Gen/Dis Loss: 0.8210/0.6406 Epoch: 051/100 | Batch 200/469 | Gen/Dis Loss: 0.8175/0.6689 Epoch: 051/100 | Batch 300/469 | Gen/Dis Loss: 0.8334/0.6648 Epoch: 051/100 | Batch 400/469 | Gen/Dis Loss: 0.8267/0.6711 Time elapsed: 5.72 min Epoch: 052/100 | Batch 000/469 | Gen/Dis Loss: 0.8039/0.6805 Epoch: 052/100 | Batch 100/469 | Gen/Dis Loss: 0.8033/0.6857 Epoch: 052/100 | Batch 200/469 | Gen/Dis Loss: 0.8536/0.6880 Epoch: 052/100 | Batch 300/469 | Gen/Dis Loss: 0.8393/0.6800 Epoch: 052/100 | Batch 400/469 | Gen/Dis Loss: 0.8459/0.6822 Time elapsed: 5.83 min Epoch: 053/100 | Batch 000/469 | Gen/Dis Loss: 0.8290/0.6822 Epoch: 053/100 | Batch 100/469 | Gen/Dis Loss: 0.8169/0.6643 Epoch: 053/100 | Batch 200/469 | Gen/Dis Loss: 0.8152/0.7038 Epoch: 053/100 | Batch 300/469 | Gen/Dis Loss: 0.7998/0.6949 Epoch: 053/100 | Batch 400/469 | Gen/Dis Loss: 0.8221/0.6876 Time elapsed: 5.95 min Epoch: 054/100 | Batch 000/469 | Gen/Dis Loss: 0.8186/0.6600 Epoch: 054/100 | Batch 100/469 | Gen/Dis Loss: 0.8263/0.6638 Epoch: 054/100 | Batch 200/469 | Gen/Dis Loss: 0.7962/0.6765 Epoch: 054/100 | Batch 300/469 | Gen/Dis Loss: 0.8101/0.6706 Epoch: 054/100 | Batch 400/469 | Gen/Dis Loss: 0.8175/0.6851 Time elapsed: 6.06 min Epoch: 055/100 | Batch 000/469 | Gen/Dis Loss: 0.8339/0.6888 Epoch: 055/100 | Batch 100/469 | Gen/Dis Loss: 0.7916/0.6795 Epoch: 055/100 | Batch 200/469 | Gen/Dis Loss: 0.8209/0.6790 Epoch: 055/100 | Batch 300/469 | Gen/Dis Loss: 0.7967/0.6828 Epoch: 055/100 | Batch 400/469 
| Gen/Dis Loss: 0.7860/0.6943 Time elapsed: 6.17 min Epoch: 056/100 | Batch 000/469 | Gen/Dis Loss: 0.8065/0.6708 Epoch: 056/100 | Batch 100/469 | Gen/Dis Loss: 0.8027/0.6784 Epoch: 056/100 | Batch 200/469 | Gen/Dis Loss: 0.8216/0.6949 Epoch: 056/100 | Batch 300/469 | Gen/Dis Loss: 0.8061/0.6921 Epoch: 056/100 | Batch 400/469 | Gen/Dis Loss: 0.7812/0.6780 Time elapsed: 6.29 min Epoch: 057/100 | Batch 000/469 | Gen/Dis Loss: 0.8028/0.6936 Epoch: 057/100 | Batch 100/469 | Gen/Dis Loss: 0.7970/0.6710 Epoch: 057/100 | Batch 200/469 | Gen/Dis Loss: 0.8144/0.6570 Epoch: 057/100 | Batch 300/469 | Gen/Dis Loss: 0.8362/0.6975 Epoch: 057/100 | Batch 400/469 | Gen/Dis Loss: 0.8126/0.6814 Time elapsed: 6.40 min Epoch: 058/100 | Batch 000/469 | Gen/Dis Loss: 0.8091/0.6785 Epoch: 058/100 | Batch 100/469 | Gen/Dis Loss: 0.8226/0.6765 Epoch: 058/100 | Batch 200/469 | Gen/Dis Loss: 0.8024/0.6644 Epoch: 058/100 | Batch 300/469 | Gen/Dis Loss: 0.8224/0.6783 Epoch: 058/100 | Batch 400/469 | Gen/Dis Loss: 0.8211/0.6676 Time elapsed: 6.51 min Epoch: 059/100 | Batch 000/469 | Gen/Dis Loss: 0.8184/0.6810 Epoch: 059/100 | Batch 100/469 | Gen/Dis Loss: 0.8051/0.6849 Epoch: 059/100 | Batch 200/469 | Gen/Dis Loss: 0.7756/0.6998 Epoch: 059/100 | Batch 300/469 | Gen/Dis Loss: 0.8144/0.6788 Epoch: 059/100 | Batch 400/469 | Gen/Dis Loss: 0.7813/0.6970 Time elapsed: 6.62 min Epoch: 060/100 | Batch 000/469 | Gen/Dis Loss: 0.7938/0.6800 Epoch: 060/100 | Batch 100/469 | Gen/Dis Loss: 0.8060/0.6659 Epoch: 060/100 | Batch 200/469 | Gen/Dis Loss: 0.7827/0.6901 Epoch: 060/100 | Batch 300/469 | Gen/Dis Loss: 0.7546/0.6998 Epoch: 060/100 | Batch 400/469 | Gen/Dis Loss: 0.8366/0.6722 Time elapsed: 6.74 min Epoch: 061/100 | Batch 000/469 | Gen/Dis Loss: 0.7903/0.6879 Epoch: 061/100 | Batch 100/469 | Gen/Dis Loss: 0.8260/0.6663 Epoch: 061/100 | Batch 200/469 | Gen/Dis Loss: 0.7947/0.6939 Epoch: 061/100 | Batch 300/469 | Gen/Dis Loss: 0.8143/0.6850 Epoch: 061/100 | Batch 400/469 | Gen/Dis Loss: 0.8254/0.7004 
Time elapsed: 6.85 min Epoch: 062/100 | Batch 000/469 | Gen/Dis Loss: 0.8099/0.6758 Epoch: 062/100 | Batch 100/469 | Gen/Dis Loss: 0.8110/0.6889 Epoch: 062/100 | Batch 200/469 | Gen/Dis Loss: 0.8379/0.6662 Epoch: 062/100 | Batch 300/469 | Gen/Dis Loss: 0.7999/0.7036 Epoch: 062/100 | Batch 400/469 | Gen/Dis Loss: 0.7776/0.6999 Time elapsed: 6.96 min Epoch: 063/100 | Batch 000/469 | Gen/Dis Loss: 0.8082/0.6919 Epoch: 063/100 | Batch 100/469 | Gen/Dis Loss: 0.8226/0.6808 Epoch: 063/100 | Batch 200/469 | Gen/Dis Loss: 0.8243/0.6834 Epoch: 063/100 | Batch 300/469 | Gen/Dis Loss: 0.8286/0.6845 Epoch: 063/100 | Batch 400/469 | Gen/Dis Loss: 0.8146/0.6853 Time elapsed: 7.08 min Epoch: 064/100 | Batch 000/469 | Gen/Dis Loss: 0.8272/0.6534 Epoch: 064/100 | Batch 100/469 | Gen/Dis Loss: 0.7547/0.6936 Epoch: 064/100 | Batch 200/469 | Gen/Dis Loss: 0.8200/0.6931 Epoch: 064/100 | Batch 300/469 | Gen/Dis Loss: 0.7810/0.7169 Epoch: 064/100 | Batch 400/469 | Gen/Dis Loss: 0.7973/0.6703 Time elapsed: 7.19 min Epoch: 065/100 | Batch 000/469 | Gen/Dis Loss: 0.8039/0.6848 Epoch: 065/100 | Batch 100/469 | Gen/Dis Loss: 0.8189/0.6752 Epoch: 065/100 | Batch 200/469 | Gen/Dis Loss: 0.8231/0.6467 Epoch: 065/100 | Batch 300/469 | Gen/Dis Loss: 0.8024/0.6888 Epoch: 065/100 | Batch 400/469 | Gen/Dis Loss: 0.8004/0.6713 Time elapsed: 7.30 min Epoch: 066/100 | Batch 000/469 | Gen/Dis Loss: 0.8205/0.6749 Epoch: 066/100 | Batch 100/469 | Gen/Dis Loss: 0.8348/0.6903 Epoch: 066/100 | Batch 200/469 | Gen/Dis Loss: 0.7824/0.6773 Epoch: 066/100 | Batch 300/469 | Gen/Dis Loss: 0.8135/0.6842 Epoch: 066/100 | Batch 400/469 | Gen/Dis Loss: 0.8020/0.6871 Time elapsed: 7.41 min Epoch: 067/100 | Batch 000/469 | Gen/Dis Loss: 0.8375/0.6899 Epoch: 067/100 | Batch 100/469 | Gen/Dis Loss: 0.8415/0.6502 Epoch: 067/100 | Batch 200/469 | Gen/Dis Loss: 0.8038/0.6907 Epoch: 067/100 | Batch 300/469 | Gen/Dis Loss: 0.8012/0.6827 Epoch: 067/100 | Batch 400/469 | Gen/Dis Loss: 0.8280/0.6516 Time elapsed: 7.52 min Epoch: 
068/100 | Batch 000/469 | Gen/Dis Loss: 0.8243/0.6744 Epoch: 068/100 | Batch 100/469 | Gen/Dis Loss: 0.7850/0.6963 Epoch: 068/100 | Batch 200/469 | Gen/Dis Loss: 0.8075/0.6751 Epoch: 068/100 | Batch 300/469 | Gen/Dis Loss: 0.7744/0.6990 Epoch: 068/100 | Batch 400/469 | Gen/Dis Loss: 0.7846/0.7015 Time elapsed: 7.64 min Epoch: 069/100 | Batch 000/469 | Gen/Dis Loss: 0.8104/0.6562 Epoch: 069/100 | Batch 100/469 | Gen/Dis Loss: 0.8232/0.6599 Epoch: 069/100 | Batch 200/469 | Gen/Dis Loss: 0.7739/0.7005 Epoch: 069/100 | Batch 300/469 | Gen/Dis Loss: 0.8124/0.6825 Epoch: 069/100 | Batch 400/469 | Gen/Dis Loss: 0.7923/0.6777 Time elapsed: 7.75 min Epoch: 070/100 | Batch 000/469 | Gen/Dis Loss: 0.8202/0.6757 Epoch: 070/100 | Batch 100/469 | Gen/Dis Loss: 0.8105/0.6709 Epoch: 070/100 | Batch 200/469 | Gen/Dis Loss: 0.8243/0.6694 Epoch: 070/100 | Batch 300/469 | Gen/Dis Loss: 0.7710/0.7105 Epoch: 070/100 | Batch 400/469 | Gen/Dis Loss: 0.7999/0.6887 Time elapsed: 7.86 min Epoch: 071/100 | Batch 000/469 | Gen/Dis Loss: 0.8027/0.6959 Epoch: 071/100 | Batch 100/469 | Gen/Dis Loss: 0.8212/0.6674 Epoch: 071/100 | Batch 200/469 | Gen/Dis Loss: 0.7930/0.7007 Epoch: 071/100 | Batch 300/469 | Gen/Dis Loss: 0.7962/0.6659 Epoch: 071/100 | Batch 400/469 | Gen/Dis Loss: 0.8309/0.6706 Time elapsed: 7.98 min Epoch: 072/100 | Batch 000/469 | Gen/Dis Loss: 0.8067/0.6787 Epoch: 072/100 | Batch 100/469 | Gen/Dis Loss: 0.7947/0.6916 Epoch: 072/100 | Batch 200/469 | Gen/Dis Loss: 0.8097/0.6599 Epoch: 072/100 | Batch 300/469 | Gen/Dis Loss: 0.8121/0.6787 Epoch: 072/100 | Batch 400/469 | Gen/Dis Loss: 0.8151/0.6584 Time elapsed: 8.09 min Epoch: 073/100 | Batch 000/469 | Gen/Dis Loss: 0.8355/0.6564 Epoch: 073/100 | Batch 100/469 | Gen/Dis Loss: 0.7916/0.6759 Epoch: 073/100 | Batch 200/469 | Gen/Dis Loss: 0.8294/0.6777 Epoch: 073/100 | Batch 300/469 | Gen/Dis Loss: 0.7953/0.7039 Epoch: 073/100 | Batch 400/469 | Gen/Dis Loss: 0.8148/0.6732 Time elapsed: 8.20 min Epoch: 074/100 | Batch 000/469 | 
Gen/Dis Loss: 0.8205/0.7029 Epoch: 074/100 | Batch 100/469 | Gen/Dis Loss: 0.7841/0.6938 Epoch: 074/100 | Batch 200/469 | Gen/Dis Loss: 0.8151/0.6749 Epoch: 074/100 | Batch 300/469 | Gen/Dis Loss: 0.7798/0.7146 Epoch: 074/100 | Batch 400/469 | Gen/Dis Loss: 0.8040/0.6845 Time elapsed: 8.31 min Epoch: 075/100 | Batch 000/469 | Gen/Dis Loss: 0.7752/0.6972 Epoch: 075/100 | Batch 100/469 | Gen/Dis Loss: 0.8305/0.6842 Epoch: 075/100 | Batch 200/469 | Gen/Dis Loss: 0.7700/0.6857 Epoch: 075/100 | Batch 300/469 | Gen/Dis Loss: 0.7791/0.6964 Epoch: 075/100 | Batch 400/469 | Gen/Dis Loss: 0.7983/0.6863 Time elapsed: 8.42 min Epoch: 076/100 | Batch 000/469 | Gen/Dis Loss: 0.7717/0.6964 Epoch: 076/100 | Batch 100/469 | Gen/Dis Loss: 0.7918/0.6895 Epoch: 076/100 | Batch 200/469 | Gen/Dis Loss: 0.8036/0.7043 Epoch: 076/100 | Batch 300/469 | Gen/Dis Loss: 0.8040/0.6734 Epoch: 076/100 | Batch 400/469 | Gen/Dis Loss: 0.8152/0.6892 Time elapsed: 8.54 min Epoch: 077/100 | Batch 000/469 | Gen/Dis Loss: 0.7971/0.6683 Epoch: 077/100 | Batch 100/469 | Gen/Dis Loss: 0.7789/0.6932 Epoch: 077/100 | Batch 200/469 | Gen/Dis Loss: 0.8015/0.6643 Epoch: 077/100 | Batch 300/469 | Gen/Dis Loss: 0.7693/0.7141 Epoch: 077/100 | Batch 400/469 | Gen/Dis Loss: 0.7819/0.6919 Time elapsed: 8.65 min Epoch: 078/100 | Batch 000/469 | Gen/Dis Loss: 0.8316/0.6821 Epoch: 078/100 | Batch 100/469 | Gen/Dis Loss: 0.7907/0.6816 Epoch: 078/100 | Batch 200/469 | Gen/Dis Loss: 0.8368/0.6695 Epoch: 078/100 | Batch 300/469 | Gen/Dis Loss: 0.8224/0.6707 Epoch: 078/100 | Batch 400/469 | Gen/Dis Loss: 0.7810/0.7095 Time elapsed: 8.76 min Epoch: 079/100 | Batch 000/469 | Gen/Dis Loss: 0.7660/0.7034 Epoch: 079/100 | Batch 100/469 | Gen/Dis Loss: 0.7869/0.6864 Epoch: 079/100 | Batch 200/469 | Gen/Dis Loss: 0.7815/0.6885 Epoch: 079/100 | Batch 300/469 | Gen/Dis Loss: 0.8049/0.6789 Epoch: 079/100 | Batch 400/469 | Gen/Dis Loss: 0.8154/0.6923 Time elapsed: 8.87 min Epoch: 080/100 | Batch 000/469 | Gen/Dis Loss: 0.7851/0.6754 
Epoch: 080/100 | Batch 100/469 | Gen/Dis Loss: 0.8187/0.6666 Epoch: 080/100 | Batch 200/469 | Gen/Dis Loss: 0.7978/0.6959 Epoch: 080/100 | Batch 300/469 | Gen/Dis Loss: 0.8052/0.6752 Epoch: 080/100 | Batch 400/469 | Gen/Dis Loss: 0.7857/0.6899 Time elapsed: 8.98 min Epoch: 081/100 | Batch 000/469 | Gen/Dis Loss: 0.8003/0.6945 Epoch: 081/100 | Batch 100/469 | Gen/Dis Loss: 0.8212/0.6729 Epoch: 081/100 | Batch 200/469 | Gen/Dis Loss: 0.7986/0.6949 Epoch: 081/100 | Batch 300/469 | Gen/Dis Loss: 0.8181/0.6839 Epoch: 081/100 | Batch 400/469 | Gen/Dis Loss: 0.8209/0.6766 Time elapsed: 9.09 min Epoch: 082/100 | Batch 000/469 | Gen/Dis Loss: 0.8000/0.6711 Epoch: 082/100 | Batch 100/469 | Gen/Dis Loss: 0.8127/0.6659 Epoch: 082/100 | Batch 200/469 | Gen/Dis Loss: 0.8151/0.6814 Epoch: 082/100 | Batch 300/469 | Gen/Dis Loss: 0.7647/0.6948 Epoch: 082/100 | Batch 400/469 | Gen/Dis Loss: 0.8135/0.6856 Time elapsed: 9.20 min Epoch: 083/100 | Batch 000/469 | Gen/Dis Loss: 0.7892/0.6845 Epoch: 083/100 | Batch 100/469 | Gen/Dis Loss: 0.7993/0.6722 Epoch: 083/100 | Batch 200/469 | Gen/Dis Loss: 0.7962/0.7040 Epoch: 083/100 | Batch 300/469 | Gen/Dis Loss: 0.7942/0.6876 Epoch: 083/100 | Batch 400/469 | Gen/Dis Loss: 0.8134/0.6798 Time elapsed: 9.31 min Epoch: 084/100 | Batch 000/469 | Gen/Dis Loss: 0.8093/0.6734 Epoch: 084/100 | Batch 100/469 | Gen/Dis Loss: 0.8187/0.6674 Epoch: 084/100 | Batch 200/469 | Gen/Dis Loss: 0.7782/0.6812 Epoch: 084/100 | Batch 300/469 | Gen/Dis Loss: 0.8002/0.6884 Epoch: 084/100 | Batch 400/469 | Gen/Dis Loss: 0.7900/0.6939 Time elapsed: 9.43 min Epoch: 085/100 | Batch 000/469 | Gen/Dis Loss: 0.8315/0.6780 Epoch: 085/100 | Batch 100/469 | Gen/Dis Loss: 0.7928/0.6933 Epoch: 085/100 | Batch 200/469 | Gen/Dis Loss: 0.8184/0.6927 Epoch: 085/100 | Batch 300/469 | Gen/Dis Loss: 0.7931/0.6728 Epoch: 085/100 | Batch 400/469 | Gen/Dis Loss: 0.7994/0.6922 Time elapsed: 9.54 min Epoch: 086/100 | Batch 000/469 | Gen/Dis Loss: 0.8361/0.6710 Epoch: 086/100 | Batch 100/469 
| Gen/Dis Loss: 0.7851/0.6882 Epoch: 086/100 | Batch 200/469 | Gen/Dis Loss: 0.7830/0.7084 Epoch: 086/100 | Batch 300/469 | Gen/Dis Loss: 0.8143/0.6771 Epoch: 086/100 | Batch 400/469 | Gen/Dis Loss: 0.7861/0.7100 Time elapsed: 9.65 min Epoch: 087/100 | Batch 000/469 | Gen/Dis Loss: 0.8074/0.6770 Epoch: 087/100 | Batch 100/469 | Gen/Dis Loss: 0.7986/0.6877 Epoch: 087/100 | Batch 200/469 | Gen/Dis Loss: 0.7811/0.7078 Epoch: 087/100 | Batch 300/469 | Gen/Dis Loss: 0.8063/0.6912 Epoch: 087/100 | Batch 400/469 | Gen/Dis Loss: 0.7759/0.6970 Time elapsed: 9.77 min Epoch: 088/100 | Batch 000/469 | Gen/Dis Loss: 0.8119/0.6786 Epoch: 088/100 | Batch 100/469 | Gen/Dis Loss: 0.7883/0.6946 Epoch: 088/100 | Batch 200/469 | Gen/Dis Loss: 0.8029/0.7081 Epoch: 088/100 | Batch 300/469 | Gen/Dis Loss: 0.7960/0.6943 Epoch: 088/100 | Batch 400/469 | Gen/Dis Loss: 0.8103/0.6691 Time elapsed: 9.88 min Epoch: 089/100 | Batch 000/469 | Gen/Dis Loss: 0.8218/0.6862 Epoch: 089/100 | Batch 100/469 | Gen/Dis Loss: 0.8221/0.6735 Epoch: 089/100 | Batch 200/469 | Gen/Dis Loss: 0.8132/0.6719 Epoch: 089/100 | Batch 300/469 | Gen/Dis Loss: 0.8029/0.6775 Epoch: 089/100 | Batch 400/469 | Gen/Dis Loss: 0.8124/0.6704 Time elapsed: 9.99 min Epoch: 090/100 | Batch 000/469 | Gen/Dis Loss: 0.8017/0.6894 Epoch: 090/100 | Batch 100/469 | Gen/Dis Loss: 0.7886/0.6985 Epoch: 090/100 | Batch 200/469 | Gen/Dis Loss: 0.8060/0.6812 Epoch: 090/100 | Batch 300/469 | Gen/Dis Loss: 0.8271/0.6845 Epoch: 090/100 | Batch 400/469 | Gen/Dis Loss: 0.8085/0.6709 Time elapsed: 10.10 min Epoch: 091/100 | Batch 000/469 | Gen/Dis Loss: 0.7877/0.6932 Epoch: 091/100 | Batch 100/469 | Gen/Dis Loss: 0.7939/0.6875 Epoch: 091/100 | Batch 200/469 | Gen/Dis Loss: 0.7696/0.6980 Epoch: 091/100 | Batch 300/469 | Gen/Dis Loss: 0.7955/0.6857 Epoch: 091/100 | Batch 400/469 | Gen/Dis Loss: 0.8188/0.6782 Time elapsed: 10.21 min Epoch: 092/100 | Batch 000/469 | Gen/Dis Loss: 0.8089/0.6607 Epoch: 092/100 | Batch 100/469 | Gen/Dis Loss: 0.7861/0.6937 
Epoch: 092/100 | Batch 200/469 | Gen/Dis Loss: 0.8181/0.6804 Epoch: 092/100 | Batch 300/469 | Gen/Dis Loss: 0.8078/0.6750 Epoch: 092/100 | Batch 400/469 | Gen/Dis Loss: 0.7809/0.7012 Time elapsed: 10.32 min Epoch: 093/100 | Batch 000/469 | Gen/Dis Loss: 0.8409/0.6953 Epoch: 093/100 | Batch 100/469 | Gen/Dis Loss: 0.8021/0.6843 Epoch: 093/100 | Batch 200/469 | Gen/Dis Loss: 0.7930/0.6903 Epoch: 093/100 | Batch 300/469 | Gen/Dis Loss: 0.7649/0.6953 Epoch: 093/100 | Batch 400/469 | Gen/Dis Loss: 0.8004/0.6960 Time elapsed: 10.43 min Epoch: 094/100 | Batch 000/469 | Gen/Dis Loss: 0.7997/0.6764 Epoch: 094/100 | Batch 100/469 | Gen/Dis Loss: 0.7819/0.6906 Epoch: 094/100 | Batch 200/469 | Gen/Dis Loss: 0.7735/0.7158 Epoch: 094/100 | Batch 300/469 | Gen/Dis Loss: 0.8126/0.6695 Epoch: 094/100 | Batch 400/469 | Gen/Dis Loss: 0.8050/0.6769 Time elapsed: 10.54 min Epoch: 095/100 | Batch 000/469 | Gen/Dis Loss: 0.7984/0.6651 Epoch: 095/100 | Batch 100/469 | Gen/Dis Loss: 0.8285/0.6742 Epoch: 095/100 | Batch 200/469 | Gen/Dis Loss: 0.8085/0.6624 Epoch: 095/100 | Batch 300/469 | Gen/Dis Loss: 0.8018/0.6835 Epoch: 095/100 | Batch 400/469 | Gen/Dis Loss: 0.7778/0.6889 Time elapsed: 10.66 min Epoch: 096/100 | Batch 000/469 | Gen/Dis Loss: 0.8166/0.6723 Epoch: 096/100 | Batch 100/469 | Gen/Dis Loss: 0.8009/0.6798 Epoch: 096/100 | Batch 200/469 | Gen/Dis Loss: 0.8044/0.6817 Epoch: 096/100 | Batch 300/469 | Gen/Dis Loss: 0.8166/0.6784 Epoch: 096/100 | Batch 400/469 | Gen/Dis Loss: 0.7619/0.6903 Time elapsed: 10.77 min Epoch: 097/100 | Batch 000/469 | Gen/Dis Loss: 0.7645/0.7070 Epoch: 097/100 | Batch 100/469 | Gen/Dis Loss: 0.7751/0.6975 Epoch: 097/100 | Batch 200/469 | Gen/Dis Loss: 0.8049/0.6868 Epoch: 097/100 | Batch 300/469 | Gen/Dis Loss: 0.7810/0.6798 Epoch: 097/100 | Batch 400/469 | Gen/Dis Loss: 0.8145/0.6873 Time elapsed: 10.88 min Epoch: 098/100 | Batch 000/469 | Gen/Dis Loss: 0.7926/0.6958 Epoch: 098/100 | Batch 100/469 | Gen/Dis Loss: 0.7901/0.6941 Epoch: 098/100 | Batch 
200/469 | Gen/Dis Loss: 0.8026/0.6743 Epoch: 098/100 | Batch 300/469 | Gen/Dis Loss: 0.7940/0.6922 Epoch: 098/100 | Batch 400/469 | Gen/Dis Loss: 0.7677/0.6990 Time elapsed: 10.99 min Epoch: 099/100 | Batch 000/469 | Gen/Dis Loss: 0.7952/0.6971 Epoch: 099/100 | Batch 100/469 | Gen/Dis Loss: 0.8201/0.6741 Epoch: 099/100 | Batch 200/469 | Gen/Dis Loss: 0.7985/0.6874 Epoch: 099/100 | Batch 300/469 | Gen/Dis Loss: 0.8090/0.6707 Epoch: 099/100 | Batch 400/469 | Gen/Dis Loss: 0.7830/0.6894 Time elapsed: 11.10 min Epoch: 100/100 | Batch 000/469 | Gen/Dis Loss: 0.7863/0.6857 Epoch: 100/100 | Batch 100/469 | Gen/Dis Loss: 0.8107/0.6853 Epoch: 100/100 | Batch 200/469 | Gen/Dis Loss: 0.7711/0.7107 Epoch: 100/100 | Batch 300/469 | Gen/Dis Loss: 0.7950/0.6758 Epoch: 100/100 | Batch 400/469 | Gen/Dis Loss: 0.7759/0.7018 Time elapsed: 11.20 min Total Training Time: 11.20 min
### For Debugging
# Intentionally disabled debug helper: kept as a string literal so it is not
# executed. Paste the contents into a cell (after `outputs` exists) to print
# the size of each intermediate tensor. The string's repr is the cell output.
"""
for i in outputs:
print(i.size())
"""
'\nfor i in outputs:\n print(i.size())\n'
%matplotlib inline
import matplotlib.pyplot as plt
plt.plot(range(len(gener_costs)), gener_costs, label='generator loss')
plt.plot(range(len(discr_costs)), discr_costs, label='discriminator loss')
plt.legend()
plt.show()
##########################
### VISUALIZATION
##########################

model.eval()

# Make new images.
# Sample 5 latent vectors uniformly from [-1, 1) and run them through the
# generator. Wrapped in no_grad so the forward pass does not build an
# autograd graph -- we only need the outputs for plotting, so this saves
# memory versus relying on .detach() afterwards.
with torch.no_grad():
    z = torch.zeros((5, LATENT_DIM)).uniform_(-1.0, 1.0).to(device)
    generated_features = model.generator_forward(z)
    imgs = generated_features.view(-1, 28, 28)

fig, axes = plt.subplots(nrows=1, ncols=5, figsize=(20, 2.5))
for i, ax in enumerate(axes):
    # Use the loop's `ax` directly (original re-indexed axes[i]); move each
    # image to CPU for matplotlib.
    ax.imshow(imgs[i].to(torch.device('cpu')).detach(), cmap='binary')
from torchsummary import summary

# torchsummary probes the model with dummy inputs, so the model must live on
# the device used for those inputs; move it to GPU 0 first.
model = model.to('cuda:0')

# Print a layer-by-layer summary for each GAN sub-network with its
# corresponding input shape.
for sub_network, in_shape in ((model.generator, (100,)),
                              (model.discriminator, (1, 28, 28))):
    summary(sub_network, input_size=in_shape)
---------------------------------------------------------------- Layer (type) Output Shape Param # ================================================================ Linear-1 [-1, 3136] 313,600 BatchNorm1d-2 [-1, 3136] 6,272 LeakyReLU-3 [-1, 3136] 0 Reshape1-4 [-1, 64, 7, 7] 0 ConvTranspose2d-5 [-1, 32, 13, 13] 18,432 BatchNorm2d-6 [-1, 32, 13, 13] 64 LeakyReLU-7 [-1, 32, 13, 13] 0 ConvTranspose2d-8 [-1, 16, 25, 25] 4,608 BatchNorm2d-9 [-1, 16, 25, 25] 32 LeakyReLU-10 [-1, 16, 25, 25] 0 ConvTranspose2d-11 [-1, 8, 27, 27] 1,152 BatchNorm2d-12 [-1, 8, 27, 27] 16 LeakyReLU-13 [-1, 8, 27, 27] 0 ConvTranspose2d-14 [-1, 1, 28, 28] 32 Tanh-15 [-1, 1, 28, 28] 0 ================================================================ Total params: 344,208 Trainable params: 344,208 Non-trainable params: 0 ---------------------------------------------------------------- Input size (MB): 0.00 Forward/backward pass size (MB): 0.59 Params size (MB): 1.31 Estimated Total Size (MB): 1.91 ---------------------------------------------------------------- ---------------------------------------------------------------- Layer (type) Output Shape Param # ================================================================ Conv2d-1 [-1, 8, 14, 14] 72 BatchNorm2d-2 [-1, 8, 14, 14] 16 LeakyReLU-3 [-1, 8, 14, 14] 0 Conv2d-4 [-1, 32, 7, 7] 2,304 BatchNorm2d-5 [-1, 32, 7, 7] 64 LeakyReLU-6 [-1, 32, 7, 7] 0 Flatten-7 [-1, 1568] 0 Linear-8 [-1, 1] 1,569 ================================================================ Total params: 4,025 Trainable params: 4,025 Non-trainable params: 0 ---------------------------------------------------------------- Input size (MB): 0.00 Forward/backward pass size (MB): 0.08 Params size (MB): 0.02 Estimated Total Size (MB): 0.10 ----------------------------------------------------------------