STAT 479: Deep Learning (Spring 2019)
Instructor: Sebastian Raschka (sraschka@wisc.edu)
Course website: http://pages.stat.wisc.edu/~sraschka/teaching/stat479-ss2019/
GitHub repository: https://github.com/rasbt/stat479-deep-learning-ss19
%load_ext watermark
%watermark -a 'Sebastian Raschka' -v -p torch
Sebastian Raschka CPython 3.6.8 IPython 7.2.0 torch 1.0.1.post2
A GAN with mode collapse.
import time
import numpy as np
import torch
import torch.nn.functional as F
from torchvision import datasets
from torchvision import transforms
import torch.nn as nn
from torch.utils.data import DataLoader
# Make cuDNN pick deterministic convolution algorithms so runs are
# reproducible on GPU (has no effect on CPU-only machines).
if torch.cuda.is_available():
    torch.backends.cudnn.deterministic = True
##########################
### SETTINGS
##########################

# Device: use the second CUDA GPU when available, otherwise fall back to CPU.
device = torch.device("cuda:1" if torch.cuda.is_available() else "cpu")

# Hyperparameters
random_seed = 0
generator_learning_rate = 0.001
discriminator_learning_rate = 0.001
num_epochs = 100
BATCH_SIZE = 128
LATENT_DIM = 100
IMG_SHAPE = (1, 28, 28)

# Number of pixels per (flattened) image: 1 * 28 * 28 = 784.
IMG_SIZE = 1
for dim in IMG_SHAPE:
    IMG_SIZE = IMG_SIZE * dim
##########################
### MNIST DATASET
##########################

# transforms.ToTensor() converts PIL images to float tensors
# scaled into the 0-1 range.
mnist_transform = transforms.ToTensor()

train_dataset = datasets.MNIST(root='data',
                               train=True,
                               transform=mnist_transform,
                               download=True)

test_dataset = datasets.MNIST(root='data',
                              train=False,
                              transform=mnist_transform)

# Shuffle only the training split; 4 worker processes feed each loader.
train_loader = DataLoader(dataset=train_dataset,
                          batch_size=BATCH_SIZE,
                          num_workers=4,
                          shuffle=True)

test_loader = DataLoader(dataset=test_dataset,
                         batch_size=BATCH_SIZE,
                         num_workers=4,
                         shuffle=False)
# Sanity check: inspect the shape of a single minibatch.
images, labels = next(iter(train_loader))
print('Image batch dimensions:', images.shape)
print('Image label dimensions:', labels.shape)
Image batch dimensions: torch.Size([128, 1, 28, 28]) Image label dimensions: torch.Size([128])
##########################
### MODEL
##########################
class Flatten(nn.Module):
    """Collapse every dimension after the batch dimension into one.

    Turns an input of shape (N, d1, d2, ...) into (N, d1*d2*...),
    e.g. for bridging conv feature maps to fully connected layers.
    """

    def forward(self, input):
        batch_size = input.size(0)
        return input.view(batch_size, -1)
class GAN(torch.nn.Module):
    """A simple GAN for 28x28 grayscale (MNIST) images.

    generator: fully connected net mapping a LATENT_DIM-dim noise vector
        to a flattened image in [-1, 1] (Tanh output).
    discriminator: small strided CNN mapping a (N, 1, 28, 28) image to a
        probability in [0, 1] (Sigmoid output) that the image is real.
    """

    def __init__(self):
        super(GAN, self).__init__()

        self.generator = nn.Sequential(
            nn.Linear(LATENT_DIM, 128),
            nn.LeakyReLU(inplace=True),
            nn.Dropout(p=0.5),
            nn.Linear(128, IMG_SIZE),
            nn.Tanh()  # output in [-1, 1] to match normalized real images
        )

        # Strided convolutions halve the spatial size three times
        # (28 -> 14 -> 7 -> 4); global average pooling then yields a
        # 32-dim feature vector for the final classifier head.
        self.discriminator = nn.Sequential(
            nn.Conv2d(in_channels=1, out_channels=8, padding=1, kernel_size=(3, 3)),
            nn.LeakyReLU(inplace=True),
            nn.Conv2d(in_channels=8, out_channels=8, padding=1, stride=2, kernel_size=(3, 3)),
            nn.LeakyReLU(inplace=True),
            nn.Conv2d(in_channels=8, out_channels=16, padding=1, kernel_size=(3, 3)),
            nn.LeakyReLU(inplace=True),
            nn.Conv2d(in_channels=16, out_channels=16, padding=1, stride=2, kernel_size=(3, 3)),
            nn.LeakyReLU(inplace=True),
            nn.Conv2d(in_channels=16, out_channels=32, padding=1, kernel_size=(3, 3)),
            nn.LeakyReLU(inplace=True),
            nn.Conv2d(in_channels=32, out_channels=32, padding=1, stride=2, kernel_size=(3, 3)),
            nn.LeakyReLU(inplace=True),
            nn.AdaptiveAvgPool2d(1),
            Flatten(),
            nn.Linear(32, 16),
            nn.LeakyReLU(inplace=True),
            nn.Linear(16, 1),
            nn.Sigmoid()
        )

    def generator_forward(self, z):
        """Map latent vectors z of shape (N, LATENT_DIM) to flat images (N, IMG_SIZE)."""
        img = self.generator(z)
        return img

    def discriminator_forward(self, img):
        """Return real-vs-fake probabilities of shape (N,) for images (N, 1, 28, 28).

        BUG FIX: the original called the module-level ``model.discriminator``
        instead of ``self.discriminator``, silently coupling the method to a
        global variable (and breaking any differently-named instance).
        """
        pred = self.discriminator(img)
        return pred.view(-1)
# Seed the RNG so weight initialization is reproducible.
torch.manual_seed(random_seed)

model = GAN().to(device)

print(model)
GAN( (generator): Sequential( (0): Linear(in_features=100, out_features=128, bias=True) (1): LeakyReLU(negative_slope=0.01, inplace) (2): Dropout(p=0.5) (3): Linear(in_features=128, out_features=784, bias=True) (4): Tanh() ) (discriminator): Sequential( (0): Conv2d(1, 8, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1)) (1): LeakyReLU(negative_slope=0.01, inplace) (2): Conv2d(8, 8, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1)) (3): LeakyReLU(negative_slope=0.01, inplace) (4): Conv2d(8, 16, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1)) (5): LeakyReLU(negative_slope=0.01, inplace) (6): Conv2d(16, 16, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1)) (7): LeakyReLU(negative_slope=0.01, inplace) (8): Conv2d(16, 32, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1)) (9): LeakyReLU(negative_slope=0.01, inplace) (10): Conv2d(32, 32, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1)) (11): LeakyReLU(negative_slope=0.01, inplace) (12): AdaptiveAvgPool2d(output_size=1) (13): Flatten() (14): Linear(in_features=32, out_features=16, bias=True) (15): LeakyReLU(negative_slope=0.01, inplace) (16): Linear(in_features=16, out_features=1, bias=True) (17): Sigmoid() ) )
################################
## FOR DEBUGGING
"""
outputs= []
def hook(module, input, output):
outputs.append(output)
for i, layer in enumerate(model.discriminator):
if isinstance(layer, torch.nn.modules.conv.Conv2d):
model.discriminator[i].register_forward_hook(hook)
"""
'\noutputs= []\ndef hook(module, input, output):\n outputs.append(output)\n\nfor i, layer in enumerate(model.discriminator):\n if isinstance(layer, torch.nn.modules.conv.Conv2d):\n model.discriminator[i].register_forward_hook(hook)\n'
# Separate Adam optimizers: each update step touches only its own network.
optim_gener = torch.optim.Adam(model.generator.parameters(), lr=generator_learning_rate)
optim_discr = torch.optim.Adam(model.discriminator.parameters(), lr=discriminator_learning_rate)

start_time = time.time()

# Per-minibatch losses, stored as plain Python floats (see BUG FIX below).
discr_costs = []
gener_costs = []

for epoch in range(num_epochs):
    model = model.train()
    for batch_idx, (features, targets) in enumerate(train_loader):

        # Normalize images from [0, 1] to [-1, 1] to match the generator's Tanh range.
        features = (features - 0.5)*2.
        features = features.view(-1, IMG_SIZE).to(device)
        targets = targets.to(device)

        # Target labels: 1 for real images, 0 for generated ones.
        valid = torch.ones(targets.size(0)).float().to(device)
        fake = torch.zeros(targets.size(0)).float().to(device)

        ### FORWARD AND BACK PROP

        # --------------------------
        # Train Generator
        # --------------------------

        # Sample latent noise uniformly from [-1, 1) and generate fake images.
        z = torch.zeros((targets.size(0), LATENT_DIM)).uniform_(-1.0, 1.0).to(device)
        generated_features = model.generator_forward(z)

        # Generator loss: fool the discriminator into predicting "valid".
        discr_pred = model.discriminator_forward(generated_features.view(targets.size(0), 1, 28, 28))

        gener_loss = F.binary_cross_entropy(discr_pred, valid)

        optim_gener.zero_grad()
        gener_loss.backward()
        optim_gener.step()

        # --------------------------
        # Train Discriminator
        # --------------------------

        discr_pred_real = model.discriminator_forward(features.view(targets.size(0), 1, 28, 28))
        real_loss = F.binary_cross_entropy(discr_pred_real, valid)

        # detach() blocks gradients from this loss flowing into the generator.
        discr_pred_fake = model.discriminator_forward(generated_features.view(targets.size(0), 1, 28, 28).detach())
        fake_loss = F.binary_cross_entropy(discr_pred_fake, fake)

        discr_loss = 0.5*(real_loss + fake_loss)

        optim_discr.zero_grad()
        discr_loss.backward()
        optim_discr.step()

        # BUG FIX: append .item() floats instead of the loss tensors.
        # Appending the tensors keeps each minibatch's autograd graph
        # alive for the whole run, steadily leaking (GPU) memory.
        discr_costs.append(discr_loss.item())
        gener_costs.append(gener_loss.item())

        ### LOGGING
        if not batch_idx % 100:
            print('Epoch: %03d/%03d | Batch %03d/%03d | Gen/Dis Loss: %.4f/%.4f'
                  % (epoch+1, num_epochs, batch_idx,
                     len(train_loader), gener_loss, discr_loss))

    print('Time elapsed: %.2f min' % ((time.time() - start_time)/60))

print('Total Training Time: %.2f min' % ((time.time() - start_time)/60))
Epoch: 001/100 | Batch 000/469 | Gen/Dis Loss: 0.8191/0.7002 Epoch: 001/100 | Batch 100/469 | Gen/Dis Loss: 9.8075/0.0002 Epoch: 001/100 | Batch 200/469 | Gen/Dis Loss: 9.5446/0.0000 Epoch: 001/100 | Batch 300/469 | Gen/Dis Loss: 10.0814/0.0000 Epoch: 001/100 | Batch 400/469 | Gen/Dis Loss: 9.6357/0.0003 Time elapsed: 0.13 min Epoch: 002/100 | Batch 000/469 | Gen/Dis Loss: 20.3945/1.3500 Epoch: 002/100 | Batch 100/469 | Gen/Dis Loss: 4.4094/0.0196 Epoch: 002/100 | Batch 200/469 | Gen/Dis Loss: 3.6181/0.0336 Epoch: 002/100 | Batch 300/469 | Gen/Dis Loss: 4.3095/0.0082 Epoch: 002/100 | Batch 400/469 | Gen/Dis Loss: 5.0360/0.0157 Time elapsed: 0.26 min Epoch: 003/100 | Batch 000/469 | Gen/Dis Loss: 6.6942/0.0101 Epoch: 003/100 | Batch 100/469 | Gen/Dis Loss: 6.1725/0.0595 Epoch: 003/100 | Batch 200/469 | Gen/Dis Loss: 6.8481/0.0023 Epoch: 003/100 | Batch 300/469 | Gen/Dis Loss: 4.2729/0.0227 Epoch: 003/100 | Batch 400/469 | Gen/Dis Loss: 5.3958/0.0133 Time elapsed: 0.40 min Epoch: 004/100 | Batch 000/469 | Gen/Dis Loss: 4.6660/0.0399 Epoch: 004/100 | Batch 100/469 | Gen/Dis Loss: 3.4713/0.0829 Epoch: 004/100 | Batch 200/469 | Gen/Dis Loss: 3.7082/0.0823 Epoch: 004/100 | Batch 300/469 | Gen/Dis Loss: 3.7073/0.8105 Epoch: 004/100 | Batch 400/469 | Gen/Dis Loss: 2.6940/0.0788 Time elapsed: 0.53 min Epoch: 005/100 | Batch 000/469 | Gen/Dis Loss: 2.1446/0.1316 Epoch: 005/100 | Batch 100/469 | Gen/Dis Loss: 0.9944/0.2929 Epoch: 005/100 | Batch 200/469 | Gen/Dis Loss: 2.6511/0.0707 Epoch: 005/100 | Batch 300/469 | Gen/Dis Loss: 4.0230/0.1865 Epoch: 005/100 | Batch 400/469 | Gen/Dis Loss: 2.1756/0.1421 Time elapsed: 0.66 min Epoch: 006/100 | Batch 000/469 | Gen/Dis Loss: 3.0052/0.2182 Epoch: 006/100 | Batch 100/469 | Gen/Dis Loss: 1.0013/0.3836 Epoch: 006/100 | Batch 200/469 | Gen/Dis Loss: 1.6551/0.4852 Epoch: 006/100 | Batch 300/469 | Gen/Dis Loss: 1.9426/0.6201 Epoch: 006/100 | Batch 400/469 | Gen/Dis Loss: 1.1530/0.3668 Time elapsed: 0.79 min Epoch: 007/100 | Batch 
000/469 | Gen/Dis Loss: 2.3737/0.1426 Epoch: 007/100 | Batch 100/469 | Gen/Dis Loss: 2.8309/0.1801 Epoch: 007/100 | Batch 200/469 | Gen/Dis Loss: 2.9306/0.1114 Epoch: 007/100 | Batch 300/469 | Gen/Dis Loss: 3.7784/0.0285 Epoch: 007/100 | Batch 400/469 | Gen/Dis Loss: 4.1588/0.0134 Time elapsed: 0.93 min Epoch: 008/100 | Batch 000/469 | Gen/Dis Loss: 4.6513/0.0111 Epoch: 008/100 | Batch 100/469 | Gen/Dis Loss: 3.4312/0.0427 Epoch: 008/100 | Batch 200/469 | Gen/Dis Loss: 4.2245/0.0473 Epoch: 008/100 | Batch 300/469 | Gen/Dis Loss: 2.7220/0.0850 Epoch: 008/100 | Batch 400/469 | Gen/Dis Loss: 3.5073/0.1203 Time elapsed: 1.07 min Epoch: 009/100 | Batch 000/469 | Gen/Dis Loss: 1.5709/0.3870 Epoch: 009/100 | Batch 100/469 | Gen/Dis Loss: 2.3509/0.1541 Epoch: 009/100 | Batch 200/469 | Gen/Dis Loss: 3.2481/0.0565 Epoch: 009/100 | Batch 300/469 | Gen/Dis Loss: 0.9537/0.3172 Epoch: 009/100 | Batch 400/469 | Gen/Dis Loss: 3.5591/0.2461 Time elapsed: 1.21 min Epoch: 010/100 | Batch 000/469 | Gen/Dis Loss: 1.1312/0.3704 Epoch: 010/100 | Batch 100/469 | Gen/Dis Loss: 4.0560/0.0390 Epoch: 010/100 | Batch 200/469 | Gen/Dis Loss: 4.1450/0.0183 Epoch: 010/100 | Batch 300/469 | Gen/Dis Loss: 3.3677/0.0264 Epoch: 010/100 | Batch 400/469 | Gen/Dis Loss: 4.1596/0.0106 Time elapsed: 1.34 min Epoch: 011/100 | Batch 000/469 | Gen/Dis Loss: 6.0002/0.0025 Epoch: 011/100 | Batch 100/469 | Gen/Dis Loss: 5.7215/0.0196 Epoch: 011/100 | Batch 200/469 | Gen/Dis Loss: 2.8202/0.1549 Epoch: 011/100 | Batch 300/469 | Gen/Dis Loss: 3.2199/0.0338 Epoch: 011/100 | Batch 400/469 | Gen/Dis Loss: 6.5134/0.0534 Time elapsed: 1.48 min Epoch: 012/100 | Batch 000/469 | Gen/Dis Loss: 3.3812/0.1195 Epoch: 012/100 | Batch 100/469 | Gen/Dis Loss: 3.4145/0.1364 Epoch: 012/100 | Batch 200/469 | Gen/Dis Loss: 5.2221/0.0188 Epoch: 012/100 | Batch 300/469 | Gen/Dis Loss: 4.7663/0.0212 Epoch: 012/100 | Batch 400/469 | Gen/Dis Loss: 6.8071/0.1268 Time elapsed: 1.62 min Epoch: 013/100 | Batch 000/469 | Gen/Dis Loss: 
4.0798/0.1519 Epoch: 013/100 | Batch 100/469 | Gen/Dis Loss: 4.8794/0.0436 Epoch: 013/100 | Batch 200/469 | Gen/Dis Loss: 3.8025/0.0412 Epoch: 013/100 | Batch 300/469 | Gen/Dis Loss: 4.4867/0.0573 Epoch: 013/100 | Batch 400/469 | Gen/Dis Loss: 4.9037/0.0263 Time elapsed: 1.75 min Epoch: 014/100 | Batch 000/469 | Gen/Dis Loss: 6.8079/0.1376 Epoch: 014/100 | Batch 100/469 | Gen/Dis Loss: 3.1484/0.0750 Epoch: 014/100 | Batch 200/469 | Gen/Dis Loss: 4.0609/0.0441 Epoch: 014/100 | Batch 300/469 | Gen/Dis Loss: 3.4956/0.0653 Epoch: 014/100 | Batch 400/469 | Gen/Dis Loss: 4.4961/0.0219 Time elapsed: 1.90 min Epoch: 015/100 | Batch 000/469 | Gen/Dis Loss: 4.8989/0.0237 Epoch: 015/100 | Batch 100/469 | Gen/Dis Loss: 5.1654/0.0649 Epoch: 015/100 | Batch 200/469 | Gen/Dis Loss: 3.3103/0.0630 Epoch: 015/100 | Batch 300/469 | Gen/Dis Loss: 3.2231/0.0724 Epoch: 015/100 | Batch 400/469 | Gen/Dis Loss: 4.0156/0.0341 Time elapsed: 2.02 min Epoch: 016/100 | Batch 000/469 | Gen/Dis Loss: 4.5806/0.0279 Epoch: 016/100 | Batch 100/469 | Gen/Dis Loss: 4.0880/0.0414 Epoch: 016/100 | Batch 200/469 | Gen/Dis Loss: 4.5322/0.0289 Epoch: 016/100 | Batch 300/469 | Gen/Dis Loss: 3.7142/0.0814 Epoch: 016/100 | Batch 400/469 | Gen/Dis Loss: 4.0632/0.0389 Time elapsed: 2.16 min Epoch: 017/100 | Batch 000/469 | Gen/Dis Loss: 4.7213/0.0191 Epoch: 017/100 | Batch 100/469 | Gen/Dis Loss: 4.7644/0.0242 Epoch: 017/100 | Batch 200/469 | Gen/Dis Loss: 4.8644/0.0184 Epoch: 017/100 | Batch 300/469 | Gen/Dis Loss: 0.1334/1.6933 Epoch: 017/100 | Batch 400/469 | Gen/Dis Loss: 4.1489/0.0795 Time elapsed: 2.30 min Epoch: 018/100 | Batch 000/469 | Gen/Dis Loss: 5.6407/0.0578 Epoch: 018/100 | Batch 100/469 | Gen/Dis Loss: 5.2450/0.0622 Epoch: 018/100 | Batch 200/469 | Gen/Dis Loss: 1.7637/0.2373 Epoch: 018/100 | Batch 300/469 | Gen/Dis Loss: 1.8366/0.1508 Epoch: 018/100 | Batch 400/469 | Gen/Dis Loss: 6.3466/0.0246 Time elapsed: 2.43 min Epoch: 019/100 | Batch 000/469 | Gen/Dis Loss: 5.9773/0.0430 Epoch: 019/100 | 
Batch 100/469 | Gen/Dis Loss: 4.8891/0.0742 Epoch: 019/100 | Batch 200/469 | Gen/Dis Loss: 5.2969/0.0881 Epoch: 019/100 | Batch 300/469 | Gen/Dis Loss: 4.0312/0.1542 Epoch: 019/100 | Batch 400/469 | Gen/Dis Loss: 1.0458/0.5415 Time elapsed: 2.57 min Epoch: 020/100 | Batch 000/469 | Gen/Dis Loss: 4.6327/0.1093 Epoch: 020/100 | Batch 100/469 | Gen/Dis Loss: 3.3996/0.2361 Epoch: 020/100 | Batch 200/469 | Gen/Dis Loss: 4.8664/0.0721 Epoch: 020/100 | Batch 300/469 | Gen/Dis Loss: 4.0799/0.0850 Epoch: 020/100 | Batch 400/469 | Gen/Dis Loss: 3.9608/0.0512 Time elapsed: 2.71 min Epoch: 021/100 | Batch 000/469 | Gen/Dis Loss: 4.3915/0.0272 Epoch: 021/100 | Batch 100/469 | Gen/Dis Loss: 4.5157/0.0226 Epoch: 021/100 | Batch 200/469 | Gen/Dis Loss: 5.4433/0.0125 Epoch: 021/100 | Batch 300/469 | Gen/Dis Loss: 5.6631/0.0058 Epoch: 021/100 | Batch 400/469 | Gen/Dis Loss: 4.8545/0.0451 Time elapsed: 2.84 min Epoch: 022/100 | Batch 000/469 | Gen/Dis Loss: 5.9173/0.0064 Epoch: 022/100 | Batch 100/469 | Gen/Dis Loss: 6.6226/0.0513 Epoch: 022/100 | Batch 200/469 | Gen/Dis Loss: 4.6378/0.0777 Epoch: 022/100 | Batch 300/469 | Gen/Dis Loss: 5.4159/0.1300 Epoch: 022/100 | Batch 400/469 | Gen/Dis Loss: 4.1837/0.1011 Time elapsed: 2.99 min Epoch: 023/100 | Batch 000/469 | Gen/Dis Loss: 3.0598/0.0778 Epoch: 023/100 | Batch 100/469 | Gen/Dis Loss: 3.9140/0.1117 Epoch: 023/100 | Batch 200/469 | Gen/Dis Loss: 4.3776/0.0754 Epoch: 023/100 | Batch 300/469 | Gen/Dis Loss: 5.8252/0.0390 Epoch: 023/100 | Batch 400/469 | Gen/Dis Loss: 3.5124/0.1554 Time elapsed: 3.13 min Epoch: 024/100 | Batch 000/469 | Gen/Dis Loss: 3.7119/0.0748 Epoch: 024/100 | Batch 100/469 | Gen/Dis Loss: 4.3854/0.1691 Epoch: 024/100 | Batch 200/469 | Gen/Dis Loss: 2.8231/0.1004 Epoch: 024/100 | Batch 300/469 | Gen/Dis Loss: 2.9831/0.1378 Epoch: 024/100 | Batch 400/469 | Gen/Dis Loss: 4.4539/0.1419 Time elapsed: 3.25 min Epoch: 025/100 | Batch 000/469 | Gen/Dis Loss: 2.8281/0.1302 Epoch: 025/100 | Batch 100/469 | Gen/Dis Loss: 
3.0473/0.1550 Epoch: 025/100 | Batch 200/469 | Gen/Dis Loss: 3.7582/0.1151 Epoch: 025/100 | Batch 300/469 | Gen/Dis Loss: 2.6347/0.1507 Epoch: 025/100 | Batch 400/469 | Gen/Dis Loss: 3.1878/0.1181 Time elapsed: 3.39 min Epoch: 026/100 | Batch 000/469 | Gen/Dis Loss: 3.8894/0.1301 Epoch: 026/100 | Batch 100/469 | Gen/Dis Loss: 3.7087/0.1481 Epoch: 026/100 | Batch 200/469 | Gen/Dis Loss: 3.2223/0.1053 Epoch: 026/100 | Batch 300/469 | Gen/Dis Loss: 3.2072/0.1438 Epoch: 026/100 | Batch 400/469 | Gen/Dis Loss: 3.5120/0.1408 Time elapsed: 3.53 min Epoch: 027/100 | Batch 000/469 | Gen/Dis Loss: 3.7745/0.1747 Epoch: 027/100 | Batch 100/469 | Gen/Dis Loss: 1.6870/0.1961 Epoch: 027/100 | Batch 200/469 | Gen/Dis Loss: 3.4636/0.1508 Epoch: 027/100 | Batch 300/469 | Gen/Dis Loss: 3.0113/0.1107 Epoch: 027/100 | Batch 400/469 | Gen/Dis Loss: 3.1825/0.1528 Time elapsed: 3.67 min Epoch: 028/100 | Batch 000/469 | Gen/Dis Loss: 2.9715/0.1455 Epoch: 028/100 | Batch 100/469 | Gen/Dis Loss: 2.2832/0.1317 Epoch: 028/100 | Batch 200/469 | Gen/Dis Loss: 3.0867/0.1984 Epoch: 028/100 | Batch 300/469 | Gen/Dis Loss: 2.6859/0.1452 Epoch: 028/100 | Batch 400/469 | Gen/Dis Loss: 3.3165/0.1026 Time elapsed: 3.81 min Epoch: 029/100 | Batch 000/469 | Gen/Dis Loss: 3.1777/0.0705 Epoch: 029/100 | Batch 100/469 | Gen/Dis Loss: 2.6096/0.1220 Epoch: 029/100 | Batch 200/469 | Gen/Dis Loss: 3.0419/0.1250 Epoch: 029/100 | Batch 300/469 | Gen/Dis Loss: 2.9442/0.1844 Epoch: 029/100 | Batch 400/469 | Gen/Dis Loss: 2.5969/0.1067 Time elapsed: 3.95 min Epoch: 030/100 | Batch 000/469 | Gen/Dis Loss: 2.5172/0.1069 Epoch: 030/100 | Batch 100/469 | Gen/Dis Loss: 2.5713/0.1150 Epoch: 030/100 | Batch 200/469 | Gen/Dis Loss: 2.6983/0.1060 Epoch: 030/100 | Batch 300/469 | Gen/Dis Loss: 2.8666/0.1994 Epoch: 030/100 | Batch 400/469 | Gen/Dis Loss: 2.7785/0.1124 Time elapsed: 4.10 min Epoch: 031/100 | Batch 000/469 | Gen/Dis Loss: 2.9825/0.1717 Epoch: 031/100 | Batch 100/469 | Gen/Dis Loss: 2.7947/0.1243 Epoch: 031/100 | 
Batch 200/469 | Gen/Dis Loss: 2.8777/0.1015 Epoch: 031/100 | Batch 300/469 | Gen/Dis Loss: 2.7536/0.1541 Epoch: 031/100 | Batch 400/469 | Gen/Dis Loss: 2.5294/0.1054 Time elapsed: 4.24 min Epoch: 032/100 | Batch 000/469 | Gen/Dis Loss: 2.9892/0.1274 Epoch: 032/100 | Batch 100/469 | Gen/Dis Loss: 3.1344/0.1206 Epoch: 032/100 | Batch 200/469 | Gen/Dis Loss: 3.2461/0.1315 Epoch: 032/100 | Batch 300/469 | Gen/Dis Loss: 2.9910/0.1330 Epoch: 032/100 | Batch 400/469 | Gen/Dis Loss: 3.2227/0.1722 Time elapsed: 4.38 min Epoch: 033/100 | Batch 000/469 | Gen/Dis Loss: 3.0134/0.1161 Epoch: 033/100 | Batch 100/469 | Gen/Dis Loss: 2.9323/0.1662 Epoch: 033/100 | Batch 200/469 | Gen/Dis Loss: 2.5622/0.1959 Epoch: 033/100 | Batch 300/469 | Gen/Dis Loss: 2.5586/0.1632 Epoch: 033/100 | Batch 400/469 | Gen/Dis Loss: 3.2672/0.0646 Time elapsed: 4.52 min Epoch: 034/100 | Batch 000/469 | Gen/Dis Loss: 2.7480/0.1679 Epoch: 034/100 | Batch 100/469 | Gen/Dis Loss: 2.9074/0.1751 Epoch: 034/100 | Batch 200/469 | Gen/Dis Loss: 2.6139/0.1042 Epoch: 034/100 | Batch 300/469 | Gen/Dis Loss: 2.7318/0.1252 Epoch: 034/100 | Batch 400/469 | Gen/Dis Loss: 3.3844/0.1270 Time elapsed: 4.66 min Epoch: 035/100 | Batch 000/469 | Gen/Dis Loss: 2.2964/0.1278 Epoch: 035/100 | Batch 100/469 | Gen/Dis Loss: 3.8027/0.1360 Epoch: 035/100 | Batch 200/469 | Gen/Dis Loss: 2.5758/0.1503 Epoch: 035/100 | Batch 300/469 | Gen/Dis Loss: 2.4648/0.1439 Epoch: 035/100 | Batch 400/469 | Gen/Dis Loss: 2.5729/0.1366 Time elapsed: 4.81 min Epoch: 036/100 | Batch 000/469 | Gen/Dis Loss: 3.0542/0.1393 Epoch: 036/100 | Batch 100/469 | Gen/Dis Loss: 3.7259/0.1339 Epoch: 036/100 | Batch 200/469 | Gen/Dis Loss: 3.0134/0.1702 Epoch: 036/100 | Batch 300/469 | Gen/Dis Loss: 2.7425/0.0976 Epoch: 036/100 | Batch 400/469 | Gen/Dis Loss: 2.3969/0.1752 Time elapsed: 4.94 min Epoch: 037/100 | Batch 000/469 | Gen/Dis Loss: 2.7174/0.1223 Epoch: 037/100 | Batch 100/469 | Gen/Dis Loss: 2.6794/0.1139 Epoch: 037/100 | Batch 200/469 | Gen/Dis Loss: 
2.8441/0.1550 Epoch: 037/100 | Batch 300/469 | Gen/Dis Loss: 2.1665/0.1619 Epoch: 037/100 | Batch 400/469 | Gen/Dis Loss: 2.9273/0.1260 Time elapsed: 5.08 min Epoch: 038/100 | Batch 000/469 | Gen/Dis Loss: 2.2155/0.1075 Epoch: 038/100 | Batch 100/469 | Gen/Dis Loss: 2.5871/0.1234 Epoch: 038/100 | Batch 200/469 | Gen/Dis Loss: 2.7994/0.1114 Epoch: 038/100 | Batch 300/469 | Gen/Dis Loss: 2.3692/0.1261 Epoch: 038/100 | Batch 400/469 | Gen/Dis Loss: 2.6779/0.1784 Time elapsed: 5.23 min Epoch: 039/100 | Batch 000/469 | Gen/Dis Loss: 2.9587/0.1112 Epoch: 039/100 | Batch 100/469 | Gen/Dis Loss: 2.3742/0.1256 Epoch: 039/100 | Batch 200/469 | Gen/Dis Loss: 3.4785/0.0921 Epoch: 039/100 | Batch 300/469 | Gen/Dis Loss: 2.5784/0.1156 Epoch: 039/100 | Batch 400/469 | Gen/Dis Loss: 3.2400/0.1973 Time elapsed: 5.38 min Epoch: 040/100 | Batch 000/469 | Gen/Dis Loss: 2.8712/0.1191 Epoch: 040/100 | Batch 100/469 | Gen/Dis Loss: 2.5167/0.1558 Epoch: 040/100 | Batch 200/469 | Gen/Dis Loss: 2.5560/0.1172 Epoch: 040/100 | Batch 300/469 | Gen/Dis Loss: 2.3734/0.1210 Epoch: 040/100 | Batch 400/469 | Gen/Dis Loss: 3.0401/0.1555 Time elapsed: 5.53 min Epoch: 041/100 | Batch 000/469 | Gen/Dis Loss: 2.1231/0.1369 Epoch: 041/100 | Batch 100/469 | Gen/Dis Loss: 3.3510/0.1174 Epoch: 041/100 | Batch 200/469 | Gen/Dis Loss: 2.4795/0.1224 Epoch: 041/100 | Batch 300/469 | Gen/Dis Loss: 2.9753/0.1265 Epoch: 041/100 | Batch 400/469 | Gen/Dis Loss: 2.8366/0.1011 Time elapsed: 5.68 min Epoch: 042/100 | Batch 000/469 | Gen/Dis Loss: 2.9721/0.1564 Epoch: 042/100 | Batch 100/469 | Gen/Dis Loss: 2.9006/0.1496 Epoch: 042/100 | Batch 200/469 | Gen/Dis Loss: 2.4404/0.1620 Epoch: 042/100 | Batch 300/469 | Gen/Dis Loss: 3.1879/0.1500 Epoch: 042/100 | Batch 400/469 | Gen/Dis Loss: 2.9220/0.1110 Time elapsed: 5.83 min Epoch: 043/100 | Batch 000/469 | Gen/Dis Loss: 3.2125/0.1528 Epoch: 043/100 | Batch 100/469 | Gen/Dis Loss: 3.0841/0.1438 Epoch: 043/100 | Batch 200/469 | Gen/Dis Loss: 3.2205/0.1400 Epoch: 043/100 | 
Batch 300/469 | Gen/Dis Loss: 3.1740/0.1594 Epoch: 043/100 | Batch 400/469 | Gen/Dis Loss: 2.6642/0.1378 Time elapsed: 5.98 min Epoch: 044/100 | Batch 000/469 | Gen/Dis Loss: 2.5351/0.1290 Epoch: 044/100 | Batch 100/469 | Gen/Dis Loss: 3.0344/0.1531 Epoch: 044/100 | Batch 200/469 | Gen/Dis Loss: 2.3395/0.1464 Epoch: 044/100 | Batch 300/469 | Gen/Dis Loss: 2.6482/0.0924 Epoch: 044/100 | Batch 400/469 | Gen/Dis Loss: 3.2089/0.1454 Time elapsed: 6.11 min Epoch: 045/100 | Batch 000/469 | Gen/Dis Loss: 2.8598/0.1098 Epoch: 045/100 | Batch 100/469 | Gen/Dis Loss: 3.2757/0.1462 Epoch: 045/100 | Batch 200/469 | Gen/Dis Loss: 2.5243/0.1318 Epoch: 045/100 | Batch 300/469 | Gen/Dis Loss: 2.2915/0.1434 Epoch: 045/100 | Batch 400/469 | Gen/Dis Loss: 2.7830/0.0933 Time elapsed: 6.25 min Epoch: 046/100 | Batch 000/469 | Gen/Dis Loss: 2.3748/0.1126 Epoch: 046/100 | Batch 100/469 | Gen/Dis Loss: 2.8312/0.1620 Epoch: 046/100 | Batch 200/469 | Gen/Dis Loss: 2.5480/0.1154 Epoch: 046/100 | Batch 300/469 | Gen/Dis Loss: 3.8736/0.2079 Epoch: 046/100 | Batch 400/469 | Gen/Dis Loss: 3.0722/0.1372 Time elapsed: 6.40 min Epoch: 047/100 | Batch 000/469 | Gen/Dis Loss: 3.1334/0.1365 Epoch: 047/100 | Batch 100/469 | Gen/Dis Loss: 2.9571/0.1772 Epoch: 047/100 | Batch 200/469 | Gen/Dis Loss: 2.1512/0.1620 Epoch: 047/100 | Batch 300/469 | Gen/Dis Loss: 3.0626/0.1159 Epoch: 047/100 | Batch 400/469 | Gen/Dis Loss: 3.6252/0.0949 Time elapsed: 6.54 min Epoch: 048/100 | Batch 000/469 | Gen/Dis Loss: 2.3513/0.1267 Epoch: 048/100 | Batch 100/469 | Gen/Dis Loss: 2.9084/0.1197 Epoch: 048/100 | Batch 200/469 | Gen/Dis Loss: 3.3186/0.0986 Epoch: 048/100 | Batch 300/469 | Gen/Dis Loss: 2.4635/0.1333 Epoch: 048/100 | Batch 400/469 | Gen/Dis Loss: 2.6443/0.1481 Time elapsed: 6.68 min Epoch: 049/100 | Batch 000/469 | Gen/Dis Loss: 2.4796/0.1064 Epoch: 049/100 | Batch 100/469 | Gen/Dis Loss: 3.5757/0.1433 Epoch: 049/100 | Batch 200/469 | Gen/Dis Loss: 2.7622/0.1104 Epoch: 049/100 | Batch 300/469 | Gen/Dis Loss: 
3.0908/0.1462 Epoch: 049/100 | Batch 400/469 | Gen/Dis Loss: 3.7989/0.1239 Time elapsed: 6.82 min Epoch: 050/100 | Batch 000/469 | Gen/Dis Loss: 3.1282/0.1415 Epoch: 050/100 | Batch 100/469 | Gen/Dis Loss: 2.9859/0.1006 Epoch: 050/100 | Batch 200/469 | Gen/Dis Loss: 3.1085/0.0963 Epoch: 050/100 | Batch 300/469 | Gen/Dis Loss: 2.8229/0.0991 Epoch: 050/100 | Batch 400/469 | Gen/Dis Loss: 3.3205/0.1386 Time elapsed: 6.97 min Epoch: 051/100 | Batch 000/469 | Gen/Dis Loss: 2.8247/0.0785 Epoch: 051/100 | Batch 100/469 | Gen/Dis Loss: 3.0324/0.1137 Epoch: 051/100 | Batch 200/469 | Gen/Dis Loss: 2.8946/0.0899 Epoch: 051/100 | Batch 300/469 | Gen/Dis Loss: 3.5022/0.0875 Epoch: 051/100 | Batch 400/469 | Gen/Dis Loss: 2.6736/0.1139 Time elapsed: 7.12 min Epoch: 052/100 | Batch 000/469 | Gen/Dis Loss: 1.9022/0.1736 Epoch: 052/100 | Batch 100/469 | Gen/Dis Loss: 3.0477/0.0753 Epoch: 052/100 | Batch 200/469 | Gen/Dis Loss: 2.7701/0.1074 Epoch: 052/100 | Batch 300/469 | Gen/Dis Loss: 2.7447/0.1370 Epoch: 052/100 | Batch 400/469 | Gen/Dis Loss: 3.2158/0.1397 Time elapsed: 7.27 min Epoch: 053/100 | Batch 000/469 | Gen/Dis Loss: 3.1773/0.1238 Epoch: 053/100 | Batch 100/469 | Gen/Dis Loss: 2.3667/0.1339 Epoch: 053/100 | Batch 200/469 | Gen/Dis Loss: 2.9094/0.0900 Epoch: 053/100 | Batch 300/469 | Gen/Dis Loss: 2.5499/0.1158 Epoch: 053/100 | Batch 400/469 | Gen/Dis Loss: 3.7224/0.1705 Time elapsed: 7.42 min Epoch: 054/100 | Batch 000/469 | Gen/Dis Loss: 3.5804/0.0980 Epoch: 054/100 | Batch 100/469 | Gen/Dis Loss: 3.0936/0.0996 Epoch: 054/100 | Batch 200/469 | Gen/Dis Loss: 2.6770/0.1350 Epoch: 054/100 | Batch 300/469 | Gen/Dis Loss: 2.6068/0.1520 Epoch: 054/100 | Batch 400/469 | Gen/Dis Loss: 3.3692/0.1132 Time elapsed: 7.58 min Epoch: 055/100 | Batch 000/469 | Gen/Dis Loss: 2.3943/0.1166 Epoch: 055/100 | Batch 100/469 | Gen/Dis Loss: 3.1152/0.1144 Epoch: 055/100 | Batch 200/469 | Gen/Dis Loss: 2.6029/0.0729 Epoch: 055/100 | Batch 300/469 | Gen/Dis Loss: 2.7339/0.0937 Epoch: 055/100 | 
Batch 400/469 | Gen/Dis Loss: 2.9161/0.1119 Time elapsed: 7.73 min Epoch: 056/100 | Batch 000/469 | Gen/Dis Loss: 2.7520/0.1041 Epoch: 056/100 | Batch 100/469 | Gen/Dis Loss: 3.8121/0.1418 Epoch: 056/100 | Batch 200/469 | Gen/Dis Loss: 3.4513/0.0703 Epoch: 056/100 | Batch 300/469 | Gen/Dis Loss: 3.6389/0.0871 Epoch: 056/100 | Batch 400/469 | Gen/Dis Loss: 3.0503/0.1190 Time elapsed: 7.88 min Epoch: 057/100 | Batch 000/469 | Gen/Dis Loss: 3.2804/0.1390 Epoch: 057/100 | Batch 100/469 | Gen/Dis Loss: 2.8952/0.1000 Epoch: 057/100 | Batch 200/469 | Gen/Dis Loss: 2.6753/0.1509 Epoch: 057/100 | Batch 300/469 | Gen/Dis Loss: 2.2671/0.1276 Epoch: 057/100 | Batch 400/469 | Gen/Dis Loss: 3.3131/0.0724 Time elapsed: 8.04 min Epoch: 058/100 | Batch 000/469 | Gen/Dis Loss: 3.1438/0.0993 Epoch: 058/100 | Batch 100/469 | Gen/Dis Loss: 3.0975/0.1086 Epoch: 058/100 | Batch 200/469 | Gen/Dis Loss: 3.0889/0.1025 Epoch: 058/100 | Batch 300/469 | Gen/Dis Loss: 2.7163/0.0999 Epoch: 058/100 | Batch 400/469 | Gen/Dis Loss: 2.5681/0.1232 Time elapsed: 8.19 min Epoch: 059/100 | Batch 000/469 | Gen/Dis Loss: 3.0687/0.0978 Epoch: 059/100 | Batch 100/469 | Gen/Dis Loss: 3.3022/0.0962 Epoch: 059/100 | Batch 200/469 | Gen/Dis Loss: 3.2212/0.0814 Epoch: 059/100 | Batch 300/469 | Gen/Dis Loss: 3.5668/0.1245 Epoch: 059/100 | Batch 400/469 | Gen/Dis Loss: 3.2955/0.1304 Time elapsed: 8.35 min Epoch: 060/100 | Batch 000/469 | Gen/Dis Loss: 2.4413/0.1253 Epoch: 060/100 | Batch 100/469 | Gen/Dis Loss: 2.9318/0.1344 Epoch: 060/100 | Batch 200/469 | Gen/Dis Loss: 3.0037/0.0718 Epoch: 060/100 | Batch 300/469 | Gen/Dis Loss: 2.3378/0.1321 Epoch: 060/100 | Batch 400/469 | Gen/Dis Loss: 3.1881/0.1060 Time elapsed: 8.50 min Epoch: 061/100 | Batch 000/469 | Gen/Dis Loss: 2.8712/0.0874 Epoch: 061/100 | Batch 100/469 | Gen/Dis Loss: 3.6139/0.0830 Epoch: 061/100 | Batch 200/469 | Gen/Dis Loss: 3.0502/0.0816 Epoch: 061/100 | Batch 300/469 | Gen/Dis Loss: 3.8713/0.0988 Epoch: 061/100 | Batch 400/469 | Gen/Dis Loss: 
3.6879/0.1182 Time elapsed: 8.65 min Epoch: 062/100 | Batch 000/469 | Gen/Dis Loss: 2.5053/0.1113 Epoch: 062/100 | Batch 100/469 | Gen/Dis Loss: 4.1995/0.1434 Epoch: 062/100 | Batch 200/469 | Gen/Dis Loss: 3.0508/0.1183 Epoch: 062/100 | Batch 300/469 | Gen/Dis Loss: 3.0112/0.0873 Epoch: 062/100 | Batch 400/469 | Gen/Dis Loss: 3.0732/0.1087 Time elapsed: 8.79 min Epoch: 063/100 | Batch 000/469 | Gen/Dis Loss: 2.9887/0.1126 Epoch: 063/100 | Batch 100/469 | Gen/Dis Loss: 2.8426/0.1267 Epoch: 063/100 | Batch 200/469 | Gen/Dis Loss: 3.8926/0.0510 Epoch: 063/100 | Batch 300/469 | Gen/Dis Loss: 3.2453/0.0911 Epoch: 063/100 | Batch 400/469 | Gen/Dis Loss: 2.9132/0.1187 Time elapsed: 8.95 min Epoch: 064/100 | Batch 000/469 | Gen/Dis Loss: 2.5179/0.1053 Epoch: 064/100 | Batch 100/469 | Gen/Dis Loss: 3.7798/0.1449 Epoch: 064/100 | Batch 200/469 | Gen/Dis Loss: 3.2387/0.0890 Epoch: 064/100 | Batch 300/469 | Gen/Dis Loss: 2.9539/0.1512 Epoch: 064/100 | Batch 400/469 | Gen/Dis Loss: 3.6821/0.0746 Time elapsed: 9.10 min Epoch: 065/100 | Batch 000/469 | Gen/Dis Loss: 3.3936/0.0780 Epoch: 065/100 | Batch 100/469 | Gen/Dis Loss: 3.5248/0.1110 Epoch: 065/100 | Batch 200/469 | Gen/Dis Loss: 3.6407/0.1136 Epoch: 065/100 | Batch 300/469 | Gen/Dis Loss: 3.0048/0.0968 Epoch: 065/100 | Batch 400/469 | Gen/Dis Loss: 3.5036/0.1325 Time elapsed: 9.24 min Epoch: 066/100 | Batch 000/469 | Gen/Dis Loss: 2.9120/0.0880 Epoch: 066/100 | Batch 100/469 | Gen/Dis Loss: 3.7414/0.0901 Epoch: 066/100 | Batch 200/469 | Gen/Dis Loss: 2.5460/0.1423 Epoch: 066/100 | Batch 300/469 | Gen/Dis Loss: 3.1396/0.1249 Epoch: 066/100 | Batch 400/469 | Gen/Dis Loss: 2.4429/0.1363 Time elapsed: 9.40 min Epoch: 067/100 | Batch 000/469 | Gen/Dis Loss: 3.1125/0.0828 Epoch: 067/100 | Batch 100/469 | Gen/Dis Loss: 3.0863/0.0940 Epoch: 067/100 | Batch 200/469 | Gen/Dis Loss: 3.7891/0.0720 Epoch: 067/100 | Batch 300/469 | Gen/Dis Loss: 3.3832/0.1224 Epoch: 067/100 | Batch 400/469 | Gen/Dis Loss: 3.2465/0.1175 Time elapsed: 
9.56 min Epoch: 068/100 | Batch 000/469 | Gen/Dis Loss: 3.4586/0.1022 Epoch: 068/100 | Batch 100/469 | Gen/Dis Loss: 3.2487/0.0653 Epoch: 068/100 | Batch 200/469 | Gen/Dis Loss: 2.6401/0.0694 Epoch: 068/100 | Batch 300/469 | Gen/Dis Loss: 3.4535/0.0998 Epoch: 068/100 | Batch 400/469 | Gen/Dis Loss: 2.5740/0.0819 Time elapsed: 9.71 min Epoch: 069/100 | Batch 000/469 | Gen/Dis Loss: 3.1160/0.0862 Epoch: 069/100 | Batch 100/469 | Gen/Dis Loss: 3.8983/0.1603 Epoch: 069/100 | Batch 200/469 | Gen/Dis Loss: 3.1200/0.0635 Epoch: 069/100 | Batch 300/469 | Gen/Dis Loss: 2.6191/0.1198 Epoch: 069/100 | Batch 400/469 | Gen/Dis Loss: 2.9987/0.0626 Time elapsed: 9.87 min Epoch: 070/100 | Batch 000/469 | Gen/Dis Loss: 3.1219/0.0774 Epoch: 070/100 | Batch 100/469 | Gen/Dis Loss: 3.0359/0.0811 Epoch: 070/100 | Batch 200/469 | Gen/Dis Loss: 3.1846/0.0590 Epoch: 070/100 | Batch 300/469 | Gen/Dis Loss: 3.1547/0.0803 Epoch: 070/100 | Batch 400/469 | Gen/Dis Loss: 2.3519/0.1086 Time elapsed: 10.03 min Epoch: 071/100 | Batch 000/469 | Gen/Dis Loss: 3.3671/0.0595 Epoch: 071/100 | Batch 100/469 | Gen/Dis Loss: 3.0199/0.1308 Epoch: 071/100 | Batch 200/469 | Gen/Dis Loss: 2.8522/0.0847 Epoch: 071/100 | Batch 300/469 | Gen/Dis Loss: 4.2285/0.0948 Epoch: 071/100 | Batch 400/469 | Gen/Dis Loss: 3.6488/0.0650 Time elapsed: 10.17 min Epoch: 072/100 | Batch 000/469 | Gen/Dis Loss: 2.8196/0.0893 Epoch: 072/100 | Batch 100/469 | Gen/Dis Loss: 3.4319/0.0414 Epoch: 072/100 | Batch 200/469 | Gen/Dis Loss: 3.8221/0.0794 Epoch: 072/100 | Batch 300/469 | Gen/Dis Loss: 5.4797/0.1563 Epoch: 072/100 | Batch 400/469 | Gen/Dis Loss: 3.5572/0.0927 Time elapsed: 10.33 min Epoch: 073/100 | Batch 000/469 | Gen/Dis Loss: 3.5723/0.0437 Epoch: 073/100 | Batch 100/469 | Gen/Dis Loss: 3.0528/0.0740 Epoch: 073/100 | Batch 200/469 | Gen/Dis Loss: 2.3147/0.1069 Epoch: 073/100 | Batch 300/469 | Gen/Dis Loss: 3.2988/0.0960 Epoch: 073/100 | Batch 400/469 | Gen/Dis Loss: 3.3032/0.0722 Time elapsed: 10.49 min Epoch: 074/100 | 
Batch 000/469 | Gen/Dis Loss: 4.5668/0.0584 Epoch: 074/100 | Batch 100/469 | Gen/Dis Loss: 3.3017/0.0539 Epoch: 074/100 | Batch 200/469 | Gen/Dis Loss: 3.8505/0.0698 Epoch: 074/100 | Batch 300/469 | Gen/Dis Loss: 3.3187/0.0990 Epoch: 074/100 | Batch 400/469 | Gen/Dis Loss: 3.7721/0.1260 Time elapsed: 10.65 min Epoch: 075/100 | Batch 000/469 | Gen/Dis Loss: 3.0352/0.0906 Epoch: 075/100 | Batch 100/469 | Gen/Dis Loss: 3.0713/0.0657 Epoch: 075/100 | Batch 200/469 | Gen/Dis Loss: 3.6177/0.1237 Epoch: 075/100 | Batch 300/469 | Gen/Dis Loss: 3.5230/0.0628 Epoch: 075/100 | Batch 400/469 | Gen/Dis Loss: 2.9284/0.0716 Time elapsed: 10.81 min Epoch: 076/100 | Batch 000/469 | Gen/Dis Loss: 4.0102/0.1229 Epoch: 076/100 | Batch 100/469 | Gen/Dis Loss: 3.7930/0.0448 Epoch: 076/100 | Batch 200/469 | Gen/Dis Loss: 2.5385/0.0869 Epoch: 076/100 | Batch 300/469 | Gen/Dis Loss: 4.3480/0.1132 Epoch: 076/100 | Batch 400/469 | Gen/Dis Loss: 3.7373/0.0620 Time elapsed: 10.97 min Epoch: 077/100 | Batch 000/469 | Gen/Dis Loss: 3.9883/0.0822 Epoch: 077/100 | Batch 100/469 | Gen/Dis Loss: 3.2999/0.1150 Epoch: 077/100 | Batch 200/469 | Gen/Dis Loss: 4.3477/0.1246 Epoch: 077/100 | Batch 300/469 | Gen/Dis Loss: 2.3716/0.1455 Epoch: 077/100 | Batch 400/469 | Gen/Dis Loss: 3.2396/0.0709 Time elapsed: 11.12 min Epoch: 078/100 | Batch 000/469 | Gen/Dis Loss: 3.5886/0.0666 Epoch: 078/100 | Batch 100/469 | Gen/Dis Loss: 3.8743/0.0945 Epoch: 078/100 | Batch 200/469 | Gen/Dis Loss: 3.3965/0.1153 Epoch: 078/100 | Batch 300/469 | Gen/Dis Loss: 3.0722/0.0777 Epoch: 078/100 | Batch 400/469 | Gen/Dis Loss: 3.8410/0.0690 Time elapsed: 11.29 min Epoch: 079/100 | Batch 000/469 | Gen/Dis Loss: 3.7707/0.0772 Epoch: 079/100 | Batch 100/469 | Gen/Dis Loss: 4.4694/0.0727 Epoch: 079/100 | Batch 200/469 | Gen/Dis Loss: 3.2467/0.0764 Epoch: 079/100 | Batch 300/469 | Gen/Dis Loss: 4.8306/0.0960 Epoch: 079/100 | Batch 400/469 | Gen/Dis Loss: 3.9661/0.0918 Time elapsed: 11.45 min Epoch: 080/100 | Batch 000/469 | Gen/Dis 
Loss: 3.4578/0.1237 Epoch: 080/100 | Batch 100/469 | Gen/Dis Loss: 3.1651/0.0998 Epoch: 080/100 | Batch 200/469 | Gen/Dis Loss: 4.2102/0.0752 Epoch: 080/100 | Batch 300/469 | Gen/Dis Loss: 3.6477/0.0905 Epoch: 080/100 | Batch 400/469 | Gen/Dis Loss: 3.9093/0.0966 Time elapsed: 11.61 min Epoch: 081/100 | Batch 000/469 | Gen/Dis Loss: 4.1477/0.0764 Epoch: 081/100 | Batch 100/469 | Gen/Dis Loss: 4.9419/0.1248 Epoch: 081/100 | Batch 200/469 | Gen/Dis Loss: 2.1799/0.1344 Epoch: 081/100 | Batch 300/469 | Gen/Dis Loss: 3.3992/0.1464 Epoch: 081/100 | Batch 400/469 | Gen/Dis Loss: 4.0229/0.1614 Time elapsed: 11.77 min Epoch: 082/100 | Batch 000/469 | Gen/Dis Loss: 3.1771/0.0953 Epoch: 082/100 | Batch 100/469 | Gen/Dis Loss: 3.1373/0.1283 Epoch: 082/100 | Batch 200/469 | Gen/Dis Loss: 2.9484/0.1181 Epoch: 082/100 | Batch 300/469 | Gen/Dis Loss: 3.7685/0.1139 Epoch: 082/100 | Batch 400/469 | Gen/Dis Loss: 3.0664/0.0902 Time elapsed: 11.94 min Epoch: 083/100 | Batch 000/469 | Gen/Dis Loss: 3.6873/0.0578 Epoch: 083/100 | Batch 100/469 | Gen/Dis Loss: 2.9469/0.1053 Epoch: 083/100 | Batch 200/469 | Gen/Dis Loss: 3.2951/0.0668 Epoch: 083/100 | Batch 300/469 | Gen/Dis Loss: 4.2244/0.1103 Epoch: 083/100 | Batch 400/469 | Gen/Dis Loss: 3.6827/0.0860 Time elapsed: 12.11 min Epoch: 084/100 | Batch 000/469 | Gen/Dis Loss: 4.1115/0.1937 Epoch: 084/100 | Batch 100/469 | Gen/Dis Loss: 2.9917/0.1257 Epoch: 084/100 | Batch 200/469 | Gen/Dis Loss: 3.6957/0.0788 Epoch: 084/100 | Batch 300/469 | Gen/Dis Loss: 3.5981/0.0935 Epoch: 084/100 | Batch 400/469 | Gen/Dis Loss: 3.8727/0.0577 Time elapsed: 12.27 min Epoch: 085/100 | Batch 000/469 | Gen/Dis Loss: 5.5784/0.0579 Epoch: 085/100 | Batch 100/469 | Gen/Dis Loss: 3.8819/0.1453 Epoch: 085/100 | Batch 200/469 | Gen/Dis Loss: 2.8635/0.0949 Epoch: 085/100 | Batch 300/469 | Gen/Dis Loss: 3.8998/0.0723 Epoch: 085/100 | Batch 400/469 | Gen/Dis Loss: 4.9268/0.0787 Time elapsed: 12.44 min Epoch: 086/100 | Batch 000/469 | Gen/Dis Loss: 3.9878/0.0762 
Epoch: 086/100 | Batch 100/469 | Gen/Dis Loss: 4.2915/0.1318 Epoch: 086/100 | Batch 200/469 | Gen/Dis Loss: 3.7762/0.0662 Epoch: 086/100 | Batch 300/469 | Gen/Dis Loss: 4.3526/0.0792 Epoch: 086/100 | Batch 400/469 | Gen/Dis Loss: 4.4971/0.0714 Time elapsed: 12.60 min Epoch: 087/100 | Batch 000/469 | Gen/Dis Loss: 4.0826/0.0304 Epoch: 087/100 | Batch 100/469 | Gen/Dis Loss: 4.0693/0.0572 Epoch: 087/100 | Batch 200/469 | Gen/Dis Loss: 3.7055/0.0598 Epoch: 087/100 | Batch 300/469 | Gen/Dis Loss: 3.7015/0.1328 Epoch: 087/100 | Batch 400/469 | Gen/Dis Loss: 3.4135/0.0788 Time elapsed: 12.76 min Epoch: 088/100 | Batch 000/469 | Gen/Dis Loss: 3.1326/0.0499 Epoch: 088/100 | Batch 100/469 | Gen/Dis Loss: 3.0331/0.1031 Epoch: 088/100 | Batch 200/469 | Gen/Dis Loss: 4.5512/0.1056 Epoch: 088/100 | Batch 300/469 | Gen/Dis Loss: 3.6907/0.0419 Epoch: 088/100 | Batch 400/469 | Gen/Dis Loss: 3.8894/0.1329 Time elapsed: 12.92 min Epoch: 089/100 | Batch 000/469 | Gen/Dis Loss: 3.7961/0.0690 Epoch: 089/100 | Batch 100/469 | Gen/Dis Loss: 3.9411/0.0651 Epoch: 089/100 | Batch 200/469 | Gen/Dis Loss: 3.7116/0.1069 Epoch: 089/100 | Batch 300/469 | Gen/Dis Loss: 3.5103/0.0651 Epoch: 089/100 | Batch 400/469 | Gen/Dis Loss: 4.1520/0.1008 Time elapsed: 13.08 min Epoch: 090/100 | Batch 000/469 | Gen/Dis Loss: 5.4606/0.1557 Epoch: 090/100 | Batch 100/469 | Gen/Dis Loss: 4.0136/0.0856 Epoch: 090/100 | Batch 200/469 | Gen/Dis Loss: 4.5423/0.1179 Epoch: 090/100 | Batch 300/469 | Gen/Dis Loss: 3.1036/0.0978 Epoch: 090/100 | Batch 400/469 | Gen/Dis Loss: 4.1326/0.0466 Time elapsed: 13.25 min Epoch: 091/100 | Batch 000/469 | Gen/Dis Loss: 3.0463/0.0887 Epoch: 091/100 | Batch 100/469 | Gen/Dis Loss: 3.9958/0.0706 Epoch: 091/100 | Batch 200/469 | Gen/Dis Loss: 4.8757/0.1396 Epoch: 091/100 | Batch 300/469 | Gen/Dis Loss: 3.9912/0.0476 Epoch: 091/100 | Batch 400/469 | Gen/Dis Loss: 4.5092/0.1062 Time elapsed: 13.41 min Epoch: 092/100 | Batch 000/469 | Gen/Dis Loss: 3.4385/0.0832 Epoch: 092/100 | Batch 
100/469 | Gen/Dis Loss: 3.4886/0.0870 Epoch: 092/100 | Batch 200/469 | Gen/Dis Loss: 4.4106/0.0866 Epoch: 092/100 | Batch 300/469 | Gen/Dis Loss: 5.2300/0.0845 Epoch: 092/100 | Batch 400/469 | Gen/Dis Loss: 4.0808/0.0594 Time elapsed: 13.57 min Epoch: 093/100 | Batch 000/469 | Gen/Dis Loss: 3.7461/0.0868 Epoch: 093/100 | Batch 100/469 | Gen/Dis Loss: 4.1164/0.0412 Epoch: 093/100 | Batch 200/469 | Gen/Dis Loss: 3.5255/0.0509 Epoch: 093/100 | Batch 300/469 | Gen/Dis Loss: 3.5892/0.0803 Epoch: 093/100 | Batch 400/469 | Gen/Dis Loss: 4.2353/0.0567 Time elapsed: 13.73 min Epoch: 094/100 | Batch 000/469 | Gen/Dis Loss: 3.3102/0.0665 Epoch: 094/100 | Batch 100/469 | Gen/Dis Loss: 3.9297/0.0982 Epoch: 094/100 | Batch 200/469 | Gen/Dis Loss: 4.4260/0.0384 Epoch: 094/100 | Batch 300/469 | Gen/Dis Loss: 4.3291/0.1511 Epoch: 094/100 | Batch 400/469 | Gen/Dis Loss: 3.8840/0.0866 Time elapsed: 13.90 min Epoch: 095/100 | Batch 000/469 | Gen/Dis Loss: 3.7895/0.0811 Epoch: 095/100 | Batch 100/469 | Gen/Dis Loss: 3.4952/0.0851 Epoch: 095/100 | Batch 200/469 | Gen/Dis Loss: 4.8845/0.0522 Epoch: 095/100 | Batch 300/469 | Gen/Dis Loss: 5.4452/0.0742 Epoch: 095/100 | Batch 400/469 | Gen/Dis Loss: 4.2118/0.0418 Time elapsed: 14.06 min Epoch: 096/100 | Batch 000/469 | Gen/Dis Loss: 3.3709/0.0656 Epoch: 096/100 | Batch 100/469 | Gen/Dis Loss: 3.8122/0.0888 Epoch: 096/100 | Batch 200/469 | Gen/Dis Loss: 4.3120/0.0343 Epoch: 096/100 | Batch 300/469 | Gen/Dis Loss: 3.7838/0.0563 Epoch: 096/100 | Batch 400/469 | Gen/Dis Loss: 4.2007/0.0545 Time elapsed: 14.22 min Epoch: 097/100 | Batch 000/469 | Gen/Dis Loss: 4.1091/0.0392 Epoch: 097/100 | Batch 100/469 | Gen/Dis Loss: 3.3713/0.0457 Epoch: 097/100 | Batch 200/469 | Gen/Dis Loss: 3.8495/0.0716 Epoch: 097/100 | Batch 300/469 | Gen/Dis Loss: 4.5038/0.0939 Epoch: 097/100 | Batch 400/469 | Gen/Dis Loss: 3.2014/0.0670 Time elapsed: 14.39 min Epoch: 098/100 | Batch 000/469 | Gen/Dis Loss: 3.5217/0.0980 Epoch: 098/100 | Batch 100/469 | Gen/Dis Loss: 
3.7390/0.0673 Epoch: 098/100 | Batch 200/469 | Gen/Dis Loss: 4.2895/0.0903 Epoch: 098/100 | Batch 300/469 | Gen/Dis Loss: 4.6033/0.0367 Epoch: 098/100 | Batch 400/469 | Gen/Dis Loss: 4.2111/0.0623 Time elapsed: 14.56 min Epoch: 099/100 | Batch 000/469 | Gen/Dis Loss: 5.0543/0.0539 Epoch: 099/100 | Batch 100/469 | Gen/Dis Loss: 3.9375/0.0702 Epoch: 099/100 | Batch 200/469 | Gen/Dis Loss: 4.1374/0.0593 Epoch: 099/100 | Batch 300/469 | Gen/Dis Loss: 4.0087/0.0670 Epoch: 099/100 | Batch 400/469 | Gen/Dis Loss: 4.7123/0.0807 Time elapsed: 14.73 min Epoch: 100/100 | Batch 000/469 | Gen/Dis Loss: 3.6641/0.0525 Epoch: 100/100 | Batch 100/469 | Gen/Dis Loss: 4.5737/0.1005 Epoch: 100/100 | Batch 200/469 | Gen/Dis Loss: 3.2628/0.0704 Epoch: 100/100 | Batch 300/469 | Gen/Dis Loss: 3.5679/0.0722 Epoch: 100/100 | Batch 400/469 | Gen/Dis Loss: 3.3268/0.0567 Time elapsed: 14.90 min Total Training Time: 14.90 min
### For Debugging
#for i in outputs:
# print(i.size())
%matplotlib inline
import matplotlib.pyplot as plt
plt.plot(range(len(gener_costs)), gener_costs, label='generator loss')
plt.plot(range(len(discr_costs)), discr_costs, label='discriminator loss')
plt.legend()
plt.show()
##########################
### VISUALIZATION
##########################

# Switch to eval mode so layers like dropout/batchnorm (if present in the
# model) behave deterministically during sampling.
model.eval()

# Make new images: sample 5 latent vectors uniformly from [-1, 1)
# (presumably the same latent distribution used during training --
# confirm against the training loop).
z = torch.zeros((5, LATENT_DIM)).uniform_(-1.0, 1.0).to(device)
generated_features = model.generator_forward(z)
imgs = generated_features.view(-1, 28, 28)

fig, axes = plt.subplots(nrows=1, ncols=5, figsize=(20, 2.5))
for i, ax in enumerate(axes):
    # Use the loop variable directly instead of re-indexing axes[i].
    # Move to CPU and detach from the autograd graph before plotting.
    ax.imshow(imgs[i].to(torch.device('cpu')).detach(), cmap='binary')
##########################
### VISUALIZATION
##########################

# Re-run of the sampling cell above: draws a fresh batch of 5 latent
# vectors, so the generated digits differ from the previous figure.
model.eval()

# Make new images: 5 latent vectors sampled uniformly from [-1, 1).
z = torch.zeros((5, LATENT_DIM)).uniform_(-1.0, 1.0).to(device)
generated_features = model.generator_forward(z)
imgs = generated_features.view(-1, 28, 28)

fig, axes = plt.subplots(nrows=1, ncols=5, figsize=(20, 2.5))
for i, ax in enumerate(axes):
    # Use the loop variable directly instead of re-indexing axes[i].
    ax.imshow(imgs[i].to(torch.device('cpu')).detach(), cmap='binary')
##########################
### VISUALIZATION
##########################

# Larger sample: 10 generated digits in one row. With a collapsed
# generator (see notebook title) these tend to look near-identical.
model.eval()

# Make new images: 10 latent vectors sampled uniformly from [-1, 1).
z = torch.zeros((10, LATENT_DIM)).uniform_(-1.0, 1.0).to(device)
generated_features = model.generator_forward(z)
imgs = generated_features.view(-1, 28, 28)

fig, axes = plt.subplots(nrows=1, ncols=10, figsize=(20, 2.5))
for i, ax in enumerate(axes):
    # Use the loop variable directly instead of re-indexing axes[i].
    ax.imshow(imgs[i].to(torch.device('cpu')).detach(), cmap='binary')