%matplotlib inline
%reload_ext autoreload
%autoreload 2
from fastai.conv_learner import *
from fastai.dataset import *
from cgan.options.train_options import *
opt = TrainOptions().parse(['--dataroot', '/data0/datasets/cyclegan/horse2zebra', '--nThreads', '8', '--no_dropout',
'--niter', '100', '--niter_decay', '100', '--name', 'nodrop', '--gpu_ids', '2'])
------------ Options -------------
batchSize: 1
beta1: 0.5
checkpoints_dir: ./checkpoints
continue_train: False
dataroot: /data0/datasets/cyclegan/horse2zebra
dataset_mode: unaligned
display_freq: 100
display_id: 1
display_port: 8097
display_single_pane_ncols: 0
display_winsize: 256
epoch_count: 1
fineSize: 256
gpu_ids: [2]
init_type: normal
input_nc: 3
isTrain: True
lambda_A: 10.0
lambda_B: 10.0
lambda_identity: 0.5
loadSize: 286
lr: 0.0002
lr_decay_iters: 50
lr_policy: lambda
max_dataset_size: inf
model: cycle_gan
nThreads: 8
n_layers_D: 3
name: nodrop
ndf: 64
ngf: 64
niter: 100
niter_decay: 100
no_dropout: True
no_flip: False
no_html: False
no_lsgan: False
norm: instance
output_nc: 3
phase: train
pool_size: 50
print_freq: 100
resize_or_crop: resize_and_crop
save_epoch_freq: 5
save_latest_freq: 5000
serial_batches: False
update_html_freq: 1000
which_direction: AtoB
which_epoch: latest
which_model_netD: basic
which_model_netG: resnet_9blocks
-------------- End ----------------
from cgan.options.train_options import TrainOptions
from cgan.data.data_loader import CreateDataLoader
from cgan.models.models import create_model
data_loader = CreateDataLoader(opt)
dataset = data_loader.load_data()
dataset_size = len(data_loader)
dataset_size
CustomDatasetDataLoader dataset [UnalignedDataset] was created
1334
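As a quick sanity check (not part of the original run), one batch can be pulled from the loader. Assuming the UnalignedDataset convention from the CycleGAN codebase, each item should be a dict holding an unpaired 'A' and 'B' image tensor plus their file paths; the key names below are that repo's convention and are not verified against this cgan fork.
# Sanity check (assumes the usual CycleGAN UnalignedDataset keys: 'A', 'B', 'A_paths', 'B_paths')
sample = next(iter(dataset))
print(sample.keys())        # expected: dict_keys(['A', 'B', 'A_paths', 'B_paths'])
print(sample['A'].shape)    # expected: torch.Size([1, 3, 256, 256]) after resize_and_crop to fineSize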
model = create_model(opt)
cycle_gan initialization method [normal] initialization method [normal] initialization method [normal] initialization method [normal] ---------- Networks initialized ------------- ResnetGenerator( (model): Sequential( (0): ReflectionPad2d((3, 3, 3, 3)) (1): Conv2d(3, 64, kernel_size=(7, 7), stride=(1, 1)) (2): InstanceNorm2d(64, eps=1e-05, momentum=0.1, affine=False) (3): ReLU(inplace) (4): Conv2d(64, 128, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1)) (5): InstanceNorm2d(128, eps=1e-05, momentum=0.1, affine=False) (6): ReLU(inplace) (7): Conv2d(128, 256, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1)) (8): InstanceNorm2d(256, eps=1e-05, momentum=0.1, affine=False) (9): ReLU(inplace) (10): ResnetBlock( (conv_block): Sequential( (0): ReflectionPad2d((1, 1, 1, 1)) (1): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1)) (2): InstanceNorm2d(256, eps=1e-05, momentum=0.1, affine=False) (3): ReLU(inplace) (4): ReflectionPad2d((1, 1, 1, 1)) (5): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1)) (6): InstanceNorm2d(256, eps=1e-05, momentum=0.1, affine=False) ) ) (11): ResnetBlock( (conv_block): Sequential( (0): ReflectionPad2d((1, 1, 1, 1)) (1): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1)) (2): InstanceNorm2d(256, eps=1e-05, momentum=0.1, affine=False) (3): ReLU(inplace) (4): ReflectionPad2d((1, 1, 1, 1)) (5): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1)) (6): InstanceNorm2d(256, eps=1e-05, momentum=0.1, affine=False) ) ) (12): ResnetBlock( (conv_block): Sequential( (0): ReflectionPad2d((1, 1, 1, 1)) (1): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1)) (2): InstanceNorm2d(256, eps=1e-05, momentum=0.1, affine=False) (3): ReLU(inplace) (4): ReflectionPad2d((1, 1, 1, 1)) (5): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1)) (6): InstanceNorm2d(256, eps=1e-05, momentum=0.1, affine=False) ) ) (13): ResnetBlock( (conv_block): Sequential( (0): ReflectionPad2d((1, 1, 1, 1)) (1): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1)) (2): InstanceNorm2d(256, eps=1e-05, momentum=0.1, affine=False) (3): ReLU(inplace) (4): ReflectionPad2d((1, 1, 1, 1)) (5): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1)) (6): InstanceNorm2d(256, eps=1e-05, momentum=0.1, affine=False) ) ) (14): ResnetBlock( (conv_block): Sequential( (0): ReflectionPad2d((1, 1, 1, 1)) (1): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1)) (2): InstanceNorm2d(256, eps=1e-05, momentum=0.1, affine=False) (3): ReLU(inplace) (4): ReflectionPad2d((1, 1, 1, 1)) (5): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1)) (6): InstanceNorm2d(256, eps=1e-05, momentum=0.1, affine=False) ) ) (15): ResnetBlock( (conv_block): Sequential( (0): ReflectionPad2d((1, 1, 1, 1)) (1): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1)) (2): InstanceNorm2d(256, eps=1e-05, momentum=0.1, affine=False) (3): ReLU(inplace) (4): ReflectionPad2d((1, 1, 1, 1)) (5): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1)) (6): InstanceNorm2d(256, eps=1e-05, momentum=0.1, affine=False) ) ) (16): ResnetBlock( (conv_block): Sequential( (0): ReflectionPad2d((1, 1, 1, 1)) (1): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1)) (2): InstanceNorm2d(256, eps=1e-05, momentum=0.1, affine=False) (3): ReLU(inplace) (4): ReflectionPad2d((1, 1, 1, 1)) (5): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1)) (6): InstanceNorm2d(256, eps=1e-05, momentum=0.1, affine=False) ) ) (17): ResnetBlock( (conv_block): Sequential( (0): ReflectionPad2d((1, 1, 1, 1)) (1): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1)) (2): InstanceNorm2d(256, eps=1e-05, momentum=0.1, 
affine=False) (3): ReLU(inplace) (4): ReflectionPad2d((1, 1, 1, 1)) (5): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1)) (6): InstanceNorm2d(256, eps=1e-05, momentum=0.1, affine=False) ) ) (18): ResnetBlock( (conv_block): Sequential( (0): ReflectionPad2d((1, 1, 1, 1)) (1): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1)) (2): InstanceNorm2d(256, eps=1e-05, momentum=0.1, affine=False) (3): ReLU(inplace) (4): ReflectionPad2d((1, 1, 1, 1)) (5): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1)) (6): InstanceNorm2d(256, eps=1e-05, momentum=0.1, affine=False) ) ) (19): ConvTranspose2d(256, 128, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1), output_padding=(1, 1)) (20): InstanceNorm2d(128, eps=1e-05, momentum=0.1, affine=False) (21): ReLU(inplace) (22): ConvTranspose2d(128, 64, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1), output_padding=(1, 1)) (23): InstanceNorm2d(64, eps=1e-05, momentum=0.1, affine=False) (24): ReLU(inplace) (25): ReflectionPad2d((3, 3, 3, 3)) (26): Conv2d(64, 3, kernel_size=(7, 7), stride=(1, 1)) (27): Tanh() ) ) Total number of parameters: 11378179 ResnetGenerator( (model): Sequential( (0): ReflectionPad2d((3, 3, 3, 3)) (1): Conv2d(3, 64, kernel_size=(7, 7), stride=(1, 1)) (2): InstanceNorm2d(64, eps=1e-05, momentum=0.1, affine=False) (3): ReLU(inplace) (4): Conv2d(64, 128, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1)) (5): InstanceNorm2d(128, eps=1e-05, momentum=0.1, affine=False) (6): ReLU(inplace) (7): Conv2d(128, 256, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1)) (8): InstanceNorm2d(256, eps=1e-05, momentum=0.1, affine=False) (9): ReLU(inplace) (10): ResnetBlock( (conv_block): Sequential( (0): ReflectionPad2d((1, 1, 1, 1)) (1): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1)) (2): InstanceNorm2d(256, eps=1e-05, momentum=0.1, affine=False) (3): ReLU(inplace) (4): ReflectionPad2d((1, 1, 1, 1)) (5): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1)) (6): InstanceNorm2d(256, eps=1e-05, momentum=0.1, affine=False) ) ) (11): ResnetBlock( (conv_block): Sequential( (0): ReflectionPad2d((1, 1, 1, 1)) (1): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1)) (2): InstanceNorm2d(256, eps=1e-05, momentum=0.1, affine=False) (3): ReLU(inplace) (4): ReflectionPad2d((1, 1, 1, 1)) (5): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1)) (6): InstanceNorm2d(256, eps=1e-05, momentum=0.1, affine=False) ) ) (12): ResnetBlock( (conv_block): Sequential( (0): ReflectionPad2d((1, 1, 1, 1)) (1): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1)) (2): InstanceNorm2d(256, eps=1e-05, momentum=0.1, affine=False) (3): ReLU(inplace) (4): ReflectionPad2d((1, 1, 1, 1)) (5): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1)) (6): InstanceNorm2d(256, eps=1e-05, momentum=0.1, affine=False) ) ) (13): ResnetBlock( (conv_block): Sequential( (0): ReflectionPad2d((1, 1, 1, 1)) (1): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1)) (2): InstanceNorm2d(256, eps=1e-05, momentum=0.1, affine=False) (3): ReLU(inplace) (4): ReflectionPad2d((1, 1, 1, 1)) (5): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1)) (6): InstanceNorm2d(256, eps=1e-05, momentum=0.1, affine=False) ) ) (14): ResnetBlock( (conv_block): Sequential( (0): ReflectionPad2d((1, 1, 1, 1)) (1): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1)) (2): InstanceNorm2d(256, eps=1e-05, momentum=0.1, affine=False) (3): ReLU(inplace) (4): ReflectionPad2d((1, 1, 1, 1)) (5): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1)) (6): InstanceNorm2d(256, eps=1e-05, momentum=0.1, affine=False) ) ) (15): ResnetBlock( (conv_block): 
Sequential( (0): ReflectionPad2d((1, 1, 1, 1)) (1): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1)) (2): InstanceNorm2d(256, eps=1e-05, momentum=0.1, affine=False) (3): ReLU(inplace) (4): ReflectionPad2d((1, 1, 1, 1)) (5): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1)) (6): InstanceNorm2d(256, eps=1e-05, momentum=0.1, affine=False) ) ) (16): ResnetBlock( (conv_block): Sequential( (0): ReflectionPad2d((1, 1, 1, 1)) (1): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1)) (2): InstanceNorm2d(256, eps=1e-05, momentum=0.1, affine=False) (3): ReLU(inplace) (4): ReflectionPad2d((1, 1, 1, 1)) (5): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1)) (6): InstanceNorm2d(256, eps=1e-05, momentum=0.1, affine=False) ) ) (17): ResnetBlock( (conv_block): Sequential( (0): ReflectionPad2d((1, 1, 1, 1)) (1): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1)) (2): InstanceNorm2d(256, eps=1e-05, momentum=0.1, affine=False) (3): ReLU(inplace) (4): ReflectionPad2d((1, 1, 1, 1)) (5): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1)) (6): InstanceNorm2d(256, eps=1e-05, momentum=0.1, affine=False) ) ) (18): ResnetBlock( (conv_block): Sequential( (0): ReflectionPad2d((1, 1, 1, 1)) (1): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1)) (2): InstanceNorm2d(256, eps=1e-05, momentum=0.1, affine=False) (3): ReLU(inplace) (4): ReflectionPad2d((1, 1, 1, 1)) (5): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1)) (6): InstanceNorm2d(256, eps=1e-05, momentum=0.1, affine=False) ) ) (19): ConvTranspose2d(256, 128, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1), output_padding=(1, 1)) (20): InstanceNorm2d(128, eps=1e-05, momentum=0.1, affine=False) (21): ReLU(inplace) (22): ConvTranspose2d(128, 64, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1), output_padding=(1, 1)) (23): InstanceNorm2d(64, eps=1e-05, momentum=0.1, affine=False) (24): ReLU(inplace) (25): ReflectionPad2d((3, 3, 3, 3)) (26): Conv2d(64, 3, kernel_size=(7, 7), stride=(1, 1)) (27): Tanh() ) ) Total number of parameters: 11378179 NLayerDiscriminator( (model): Sequential( (0): Conv2d(3, 64, kernel_size=(4, 4), stride=(2, 2), padding=(1, 1)) (1): LeakyReLU(0.2, inplace) (2): Conv2d(64, 128, kernel_size=(4, 4), stride=(2, 2), padding=(1, 1)) (3): InstanceNorm2d(128, eps=1e-05, momentum=0.1, affine=False) (4): LeakyReLU(0.2, inplace) (5): Conv2d(128, 256, kernel_size=(4, 4), stride=(2, 2), padding=(1, 1)) (6): InstanceNorm2d(256, eps=1e-05, momentum=0.1, affine=False) (7): LeakyReLU(0.2, inplace) (8): Conv2d(256, 512, kernel_size=(4, 4), stride=(1, 1), padding=(1, 1)) (9): InstanceNorm2d(512, eps=1e-05, momentum=0.1, affine=False) (10): LeakyReLU(0.2, inplace) (11): Conv2d(512, 1, kernel_size=(4, 4), stride=(1, 1), padding=(1, 1)) ) ) Total number of parameters: 2764737 NLayerDiscriminator( (model): Sequential( (0): Conv2d(3, 64, kernel_size=(4, 4), stride=(2, 2), padding=(1, 1)) (1): LeakyReLU(0.2, inplace) (2): Conv2d(64, 128, kernel_size=(4, 4), stride=(2, 2), padding=(1, 1)) (3): InstanceNorm2d(128, eps=1e-05, momentum=0.1, affine=False) (4): LeakyReLU(0.2, inplace) (5): Conv2d(128, 256, kernel_size=(4, 4), stride=(2, 2), padding=(1, 1)) (6): InstanceNorm2d(256, eps=1e-05, momentum=0.1, affine=False) (7): LeakyReLU(0.2, inplace) (8): Conv2d(256, 512, kernel_size=(4, 4), stride=(1, 1), padding=(1, 1)) (9): InstanceNorm2d(512, eps=1e-05, momentum=0.1, affine=False) (10): LeakyReLU(0.2, inplace) (11): Conv2d(512, 1, kernel_size=(4, 4), stride=(1, 1), padding=(1, 1)) ) ) Total number of parameters: 2764737 
----------------------------------------------- model [CycleGANModel] was created
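For orientation, here is a minimal sketch of the objective that model.optimize_parameters() minimizes for the generators under these options: LSGAN adversarial terms, cycle consistency weighted by lambda_A/lambda_B, and an identity term weighted by lambda_identity. The function and argument names are illustrative only; they are not attributes of the CycleGANModel.
# Illustrative sketch of the CycleGAN generator objective (names are hypothetical, not model internals).
# G_A: A->B, G_B: B->A, D_A/D_B: discriminators on domains A/B; LSGAN uses an MSE criterion.
import torch
import torch.nn.functional as F

def cyclegan_generator_loss(G_A, G_B, D_A, D_B, real_A, real_B,
                            lambda_A=10.0, lambda_B=10.0, lambda_idt=0.5):
    fake_B, fake_A = G_A(real_A), G_B(real_B)          # translations
    rec_A,  rec_B  = G_B(fake_B), G_A(fake_A)          # reconstructions
    # LSGAN adversarial terms: push D(fake) toward the "real" label (1.0)
    pred_B, pred_A = D_B(fake_B), D_A(fake_A)
    loss_gan = F.mse_loss(pred_B, torch.ones_like(pred_B)) + F.mse_loss(pred_A, torch.ones_like(pred_A))
    # Cycle consistency: A -> B -> A and B -> A -> B should reproduce the inputs
    loss_cycle = lambda_A * F.l1_loss(rec_A, real_A) + lambda_B * F.l1_loss(rec_B, real_B)
    # Identity terms: feeding a generator an image already in its target domain should change little
    loss_idt = lambda_idt * (lambda_B * F.l1_loss(G_A(real_B), real_B) +
                             lambda_A * F.l1_loss(G_B(real_A), real_A))
    return loss_gan + loss_cycle + loss_idt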
# opt.niter=9
# opt.niter_decay=1
total_steps = 0
for epoch in range(opt.epoch_count, opt.niter + opt.niter_decay + 1):
    epoch_start_time = time.time()
    iter_data_time = time.time()
    epoch_iter = 0

    for i, data in tqdm(enumerate(dataset)):
        iter_start_time = time.time()
        if total_steps % opt.print_freq == 0: t_data = iter_start_time - iter_data_time
        total_steps += opt.batchSize
        epoch_iter += opt.batchSize

        # One optimization step: load the unpaired A/B batch, then update the generators and discriminators
        model.set_input(data)
        model.optimize_parameters()

        # save_result, errors and t are computed but unused here: the visdom/HTML visualizer calls
        # from the upstream train.py are not used in this notebook
        if total_steps % opt.display_freq == 0:
            save_result = total_steps % opt.update_html_freq == 0
        if total_steps % opt.print_freq == 0:
            errors = model.get_current_errors()
            t = (time.time() - iter_start_time) / opt.batchSize

        # Periodic "latest" checkpoint every save_latest_freq steps
        if total_steps % opt.save_latest_freq == 0:
            print('saving the latest model (epoch %d, total_steps %d)' % (epoch, total_steps))
            model.save('latest')

        iter_data_time = time.time()

    # End-of-epoch checkpoint every save_epoch_freq epochs, then the per-epoch learning-rate update
    if epoch % opt.save_epoch_freq == 0:
        print('saving the model at the end of epoch %d, iters %d' % (epoch, total_steps))
        model.save('latest')
        model.save(epoch)

    print('End of epoch %d / %d \t Time Taken: %d sec' %
          (epoch, opt.niter + opt.niter_decay, time.time() - epoch_start_time))
    model.update_learning_rate()
1334it [09:06, 2.44it/s] End of epoch 1 / 200 Time Taken: 546 sec learning rate = 0.0002000 1334it [09:06, 2.44it/s] End of epoch 2 / 200 Time Taken: 546 sec learning rate = 0.0002000 1334it [09:08, 2.43it/s] End of epoch 3 / 200 Time Taken: 548 sec learning rate = 0.0002000 997it [06:47, 2.44it/s]saving the latest model (epoch 4, total_steps 5000) 1334it [09:06, 2.44it/s] End of epoch 4 / 200 Time Taken: 546 sec learning rate = 0.0002000 1334it [09:09, 2.43it/s] saving the model at the end of epoch 5, iters 6670 End of epoch 5 / 200 Time Taken: 550 sec learning rate = 0.0002000 1334it [09:12, 2.41it/s] End of epoch 6 / 200 Time Taken: 552 sec learning rate = 0.0002000 1334it [09:16, 2.40it/s] End of epoch 7 / 200 Time Taken: 556 sec learning rate = 0.0002000 661it [04:36, 2.39it/s]saving the latest model (epoch 8, total_steps 10000) 1334it [09:16, 2.40it/s] End of epoch 8 / 200 Time Taken: 556 sec learning rate = 0.0002000 1334it [09:11, 2.42it/s] End of epoch 9 / 200 Time Taken: 551 sec learning rate = 0.0002000 1334it [09:10, 2.42it/s] saving the model at the end of epoch 10, iters 13340 End of epoch 10 / 200 Time Taken: 551 sec learning rate = 0.0002000 1334it [09:07, 2.44it/s] End of epoch 11 / 200 Time Taken: 547 sec learning rate = 0.0002000 325it [02:13, 2.43it/s]saving the latest model (epoch 12, total_steps 15000) 1334it [09:08, 2.43it/s] End of epoch 12 / 200 Time Taken: 548 sec learning rate = 0.0002000 1334it [09:09, 2.43it/s] End of epoch 13 / 200 Time Taken: 549 sec learning rate = 0.0002000 1334it [09:10, 2.42it/s] End of epoch 14 / 200 Time Taken: 550 sec learning rate = 0.0002000 1323it [09:04, 2.43it/s]saving the latest model (epoch 15, total_steps 20000) 1334it [09:09, 2.43it/s] saving the model at the end of epoch 15, iters 20010 End of epoch 15 / 200 Time Taken: 549 sec learning rate = 0.0002000 1334it [09:10, 2.42it/s] End of epoch 16 / 200 Time Taken: 550 sec learning rate = 0.0002000 1334it [09:07, 2.43it/s] End of epoch 17 / 200 Time Taken: 547 sec learning rate = 0.0002000 1334it [09:09, 2.43it/s] End of epoch 18 / 200 Time Taken: 549 sec learning rate = 0.0002000 987it [06:48, 2.42it/s]saving the latest model (epoch 19, total_steps 25000) 1334it [09:12, 2.42it/s] End of epoch 19 / 200 Time Taken: 552 sec learning rate = 0.0002000 1334it [09:10, 2.42it/s] saving the model at the end of epoch 20, iters 26680 End of epoch 20 / 200 Time Taken: 551 sec learning rate = 0.0002000 1334it [09:08, 2.43it/s] End of epoch 21 / 200 Time Taken: 548 sec learning rate = 0.0002000 1334it [09:09, 2.43it/s] End of epoch 22 / 200 Time Taken: 549 sec learning rate = 0.0002000 651it [04:28, 2.42it/s]saving the latest model (epoch 23, total_steps 30000) 1334it [09:10, 2.42it/s] End of epoch 23 / 200 Time Taken: 550 sec learning rate = 0.0002000 1334it [09:12, 2.42it/s] End of epoch 24 / 200 Time Taken: 552 sec learning rate = 0.0002000 1334it [09:10, 2.42it/s] saving the model at the end of epoch 25, iters 33350 End of epoch 25 / 200 Time Taken: 551 sec learning rate = 0.0002000 1334it [09:11, 2.42it/s] End of epoch 26 / 200 Time Taken: 551 sec learning rate = 0.0002000 315it [02:10, 2.41it/s]saving the latest model (epoch 27, total_steps 35000) 1334it [09:10, 2.42it/s] End of epoch 27 / 200 Time Taken: 550 sec learning rate = 0.0002000 1334it [09:13, 2.41it/s] End of epoch 28 / 200 Time Taken: 553 sec learning rate = 0.0002000 1334it [09:09, 2.43it/s] End of epoch 29 / 200 Time Taken: 549 sec learning rate = 0.0002000 1313it [09:00, 2.43it/s]saving the latest model (epoch 30, 
total_steps 40000) 1334it [09:09, 2.43it/s] saving the model at the end of epoch 30, iters 40020 End of epoch 30 / 200 Time Taken: 549 sec learning rate = 0.0002000 1334it [09:09, 2.43it/s] End of epoch 31 / 200 Time Taken: 549 sec learning rate = 0.0002000 1334it [09:12, 2.41it/s] End of epoch 32 / 200 Time Taken: 552 sec learning rate = 0.0002000 1334it [09:10, 2.42it/s] End of epoch 33 / 200 Time Taken: 550 sec learning rate = 0.0002000 977it [06:43, 2.42it/s]saving the latest model (epoch 34, total_steps 45000) 1334it [09:11, 2.42it/s] End of epoch 34 / 200 Time Taken: 551 sec learning rate = 0.0002000 1334it [09:12, 2.42it/s] saving the model at the end of epoch 35, iters 46690 End of epoch 35 / 200 Time Taken: 552 sec learning rate = 0.0002000 1334it [09:09, 2.43it/s] End of epoch 36 / 200 Time Taken: 549 sec learning rate = 0.0002000 1334it [09:11, 2.42it/s] End of epoch 37 / 200 Time Taken: 551 sec learning rate = 0.0002000 641it [04:25, 2.41it/s]saving the latest model (epoch 38, total_steps 50000) 1334it [09:12, 2.42it/s] End of epoch 38 / 200 Time Taken: 552 sec learning rate = 0.0002000 1334it [09:09, 2.43it/s] End of epoch 39 / 200 Time Taken: 549 sec learning rate = 0.0002000 1334it [09:09, 2.43it/s] saving the model at the end of epoch 40, iters 53360 End of epoch 40 / 200 Time Taken: 550 sec learning rate = 0.0002000 1334it [09:10, 2.42it/s] End of epoch 41 / 200 Time Taken: 550 sec learning rate = 0.0002000 305it [02:06, 2.40it/s]saving the latest model (epoch 42, total_steps 55000) 1334it [09:12, 2.41it/s] End of epoch 42 / 200 Time Taken: 552 sec learning rate = 0.0002000 1334it [09:08, 2.43it/s] End of epoch 43 / 200 Time Taken: 548 sec learning rate = 0.0002000 1334it [09:09, 2.43it/s] End of epoch 44 / 200 Time Taken: 549 sec learning rate = 0.0002000 1303it [08:56, 2.43it/s]saving the latest model (epoch 45, total_steps 60000) 1334it [09:09, 2.43it/s] saving the model at the end of epoch 45, iters 60030 End of epoch 45 / 200 Time Taken: 549 sec learning rate = 0.0002000 1334it [09:12, 2.41it/s] End of epoch 46 / 200 Time Taken: 552 sec learning rate = 0.0002000 1334it [09:12, 2.42it/s] End of epoch 47 / 200 Time Taken: 552 sec learning rate = 0.0002000 1334it [09:14, 2.41it/s] End of epoch 48 / 200 Time Taken: 554 sec learning rate = 0.0002000 967it [06:38, 2.42it/s]saving the latest model (epoch 49, total_steps 65000) 1334it [09:10, 2.42it/s] End of epoch 49 / 200 Time Taken: 550 sec learning rate = 0.0002000 1334it [09:09, 2.43it/s] saving the model at the end of epoch 50, iters 66700 End of epoch 50 / 200 Time Taken: 549 sec learning rate = 0.0002000 1334it [09:08, 2.43it/s] End of epoch 51 / 200 Time Taken: 548 sec learning rate = 0.0002000 1334it [09:08, 2.43it/s] End of epoch 52 / 200 Time Taken: 548 sec learning rate = 0.0002000 631it [04:19, 2.43it/s]saving the latest model (epoch 53, total_steps 70000) 1334it [09:09, 2.43it/s] End of epoch 53 / 200 Time Taken: 549 sec learning rate = 0.0002000 1334it [09:10, 2.42it/s] End of epoch 54 / 200 Time Taken: 550 sec learning rate = 0.0002000 1334it [09:08, 2.43it/s] saving the model at the end of epoch 55, iters 73370 End of epoch 55 / 200 Time Taken: 548 sec learning rate = 0.0002000 1334it [09:10, 2.42it/s] End of epoch 56 / 200 Time Taken: 550 sec learning rate = 0.0002000 295it [02:02, 2.40it/s]saving the latest model (epoch 57, total_steps 75000) 1334it [09:13, 2.41it/s] End of epoch 57 / 200 Time Taken: 553 sec learning rate = 0.0002000 1334it [09:11, 2.42it/s] End of epoch 58 / 200 Time Taken: 551 sec 
learning rate = 0.0002000 1334it [09:10, 2.43it/s] End of epoch 59 / 200 Time Taken: 550 sec learning rate = 0.0002000 1293it [08:52, 2.43it/s]saving the latest model (epoch 60, total_steps 80000) 1334it [09:09, 2.43it/s] saving the model at the end of epoch 60, iters 80040 End of epoch 60 / 200 Time Taken: 550 sec learning rate = 0.0002000 1334it [09:08, 2.43it/s] End of epoch 61 / 200 Time Taken: 548 sec learning rate = 0.0002000 1334it [09:09, 2.43it/s] End of epoch 62 / 200 Time Taken: 549 sec learning rate = 0.0002000 1334it [09:08, 2.43it/s] End of epoch 63 / 200 Time Taken: 548 sec learning rate = 0.0002000 957it [06:34, 2.43it/s]saving the latest model (epoch 64, total_steps 85000) 1334it [09:10, 2.43it/s] End of epoch 64 / 200 Time Taken: 550 sec learning rate = 0.0002000 1334it [09:10, 2.42it/s] saving the model at the end of epoch 65, iters 86710 End of epoch 65 / 200 Time Taken: 550 sec learning rate = 0.0002000 1334it [09:09, 2.43it/s] End of epoch 66 / 200 Time Taken: 549 sec learning rate = 0.0002000 1334it [09:12, 2.42it/s] End of epoch 67 / 200 Time Taken: 552 sec learning rate = 0.0002000 621it [04:16, 2.42it/s]saving the latest model (epoch 68, total_steps 90000) 1334it [09:10, 2.42it/s] End of epoch 68 / 200 Time Taken: 550 sec learning rate = 0.0002000 1334it [09:10, 2.43it/s] End of epoch 69 / 200 Time Taken: 550 sec learning rate = 0.0002000 1334it [09:13, 2.41it/s] saving the model at the end of epoch 70, iters 93380 End of epoch 70 / 200 Time Taken: 554 sec learning rate = 0.0002000 1334it [09:10, 2.42it/s] End of epoch 71 / 200 Time Taken: 550 sec learning rate = 0.0002000 285it [01:57, 2.42it/s]saving the latest model (epoch 72, total_steps 95000) 1334it [09:09, 2.43it/s] End of epoch 72 / 200 Time Taken: 549 sec learning rate = 0.0002000 1334it [09:13, 2.41it/s] End of epoch 73 / 200 Time Taken: 553 sec learning rate = 0.0002000 1334it [09:13, 2.41it/s] End of epoch 74 / 200 Time Taken: 553 sec learning rate = 0.0002000 1283it [08:51, 2.42it/s]saving the latest model (epoch 75, total_steps 100000) 1334it [09:12, 2.41it/s] saving the model at the end of epoch 75, iters 100050 End of epoch 75 / 200 Time Taken: 553 sec learning rate = 0.0002000 1334it [09:11, 2.42it/s] End of epoch 76 / 200 Time Taken: 551 sec learning rate = 0.0002000 1334it [09:09, 2.43it/s] End of epoch 77 / 200 Time Taken: 549 sec learning rate = 0.0002000 1334it [09:14, 2.41it/s] End of epoch 78 / 200 Time Taken: 554 sec learning rate = 0.0002000 947it [06:32, 2.41it/s]saving the latest model (epoch 79, total_steps 105000) 1334it [09:12, 2.41it/s] End of epoch 79 / 200 Time Taken: 552 sec learning rate = 0.0002000 1334it [09:10, 2.42it/s] saving the model at the end of epoch 80, iters 106720 End of epoch 80 / 200 Time Taken: 551 sec learning rate = 0.0002000 1334it [09:09, 2.43it/s] End of epoch 81 / 200 Time Taken: 549 sec learning rate = 0.0002000 1334it [09:11, 2.42it/s] End of epoch 82 / 200 Time Taken: 551 sec learning rate = 0.0002000 611it [04:13, 2.41it/s]saving the latest model (epoch 83, total_steps 110000) 1334it [09:11, 2.42it/s] End of epoch 83 / 200 Time Taken: 551 sec learning rate = 0.0002000 1334it [09:08, 2.43it/s] End of epoch 84 / 200 Time Taken: 548 sec learning rate = 0.0002000 1334it [09:08, 2.43it/s] saving the model at the end of epoch 85, iters 113390 End of epoch 85 / 200 Time Taken: 549 sec learning rate = 0.0002000 1334it [09:10, 2.42it/s] End of epoch 86 / 200 Time Taken: 550 sec learning rate = 0.0002000 275it [01:53, 2.42it/s]saving the latest model (epoch 87, 
total_steps 115000) 1334it [09:09, 2.43it/s] End of epoch 87 / 200 Time Taken: 549 sec learning rate = 0.0002000 1334it [09:09, 2.43it/s] End of epoch 88 / 200 Time Taken: 549 sec learning rate = 0.0002000 1334it [09:09, 2.43it/s] End of epoch 89 / 200 Time Taken: 549 sec learning rate = 0.0002000 1273it [08:43, 2.43it/s]saving the latest model (epoch 90, total_steps 120000) 1334it [09:08, 2.43it/s] saving the model at the end of epoch 90, iters 120060 End of epoch 90 / 200 Time Taken: 549 sec learning rate = 0.0002000 1334it [09:11, 2.42it/s] End of epoch 91 / 200 Time Taken: 551 sec learning rate = 0.0002000 1334it [09:15, 2.40it/s] End of epoch 92 / 200 Time Taken: 555 sec learning rate = 0.0002000 1334it [09:09, 2.43it/s] End of epoch 93 / 200 Time Taken: 549 sec learning rate = 0.0002000 937it [06:26, 2.43it/s]saving the latest model (epoch 94, total_steps 125000) 1334it [09:10, 2.42it/s] End of epoch 94 / 200 Time Taken: 550 sec learning rate = 0.0002000 1334it [09:09, 2.43it/s] saving the model at the end of epoch 95, iters 126730 End of epoch 95 / 200 Time Taken: 549 sec learning rate = 0.0002000 1334it [09:10, 2.42it/s] End of epoch 96 / 200 Time Taken: 550 sec learning rate = 0.0002000 1334it [09:09, 2.43it/s] End of epoch 97 / 200 Time Taken: 549 sec learning rate = 0.0002000 601it [04:08, 2.41it/s]saving the latest model (epoch 98, total_steps 130000) 1334it [09:11, 2.42it/s] End of epoch 98 / 200 Time Taken: 551 sec learning rate = 0.0002000 1334it [09:10, 2.42it/s] End of epoch 99 / 200 Time Taken: 550 sec learning rate = 0.0002000 1334it [09:10, 2.42it/s] saving the model at the end of epoch 100, iters 133400 End of epoch 100 / 200 Time Taken: 551 sec learning rate = 0.0001980 1334it [09:10, 2.42it/s] End of epoch 101 / 200 Time Taken: 550 sec learning rate = 0.0001960 265it [01:49, 2.42it/s]saving the latest model (epoch 102, total_steps 135000) 1334it [09:09, 2.43it/s] End of epoch 102 / 200 Time Taken: 549 sec learning rate = 0.0001941 1334it [09:09, 2.43it/s] End of epoch 103 / 200 Time Taken: 549 sec learning rate = 0.0001921 1334it [09:13, 2.41it/s] End of epoch 104 / 200 Time Taken: 553 sec learning rate = 0.0001901 1263it [08:42, 2.42it/s]saving the latest model (epoch 105, total_steps 140000) 1334it [09:12, 2.42it/s] saving the model at the end of epoch 105, iters 140070 End of epoch 105 / 200 Time Taken: 552 sec learning rate = 0.0001881 1334it [09:11, 2.42it/s] End of epoch 106 / 200 Time Taken: 551 sec learning rate = 0.0001861 1334it [09:12, 2.41it/s] End of epoch 107 / 200 Time Taken: 552 sec learning rate = 0.0001842 1334it [09:12, 2.41it/s] End of epoch 108 / 200 Time Taken: 552 sec learning rate = 0.0001822 927it [06:24, 2.41it/s]saving the latest model (epoch 109, total_steps 145000) 1334it [09:12, 2.41it/s] End of epoch 109 / 200 Time Taken: 552 sec learning rate = 0.0001802 1334it [09:13, 2.41it/s] saving the model at the end of epoch 110, iters 146740 End of epoch 110 / 200 Time Taken: 554 sec learning rate = 0.0001782 1334it [09:10, 2.42it/s] End of epoch 111 / 200 Time Taken: 550 sec learning rate = 0.0001762 1334it [09:10, 2.43it/s] End of epoch 112 / 200 Time Taken: 550 sec learning rate = 0.0001743 591it [04:04, 2.42it/s]saving the latest model (epoch 113, total_steps 150000) 1334it [09:12, 2.42it/s] End of epoch 113 / 200 Time Taken: 552 sec learning rate = 0.0001723 1334it [09:10, 2.42it/s] End of epoch 114 / 200 Time Taken: 550 sec learning rate = 0.0001703 1334it [09:11, 2.42it/s] saving the model at the end of epoch 115, iters 153410 End of 
epoch 115 / 200 Time Taken: 552 sec learning rate = 0.0001683 1334it [09:11, 2.42it/s] End of epoch 116 / 200 Time Taken: 551 sec learning rate = 0.0001663 255it [01:46, 2.39it/s]saving the latest model (epoch 117, total_steps 155000) 1334it [09:10, 2.42it/s] End of epoch 117 / 200 Time Taken: 550 sec learning rate = 0.0001644 1334it [09:08, 2.43it/s] End of epoch 118 / 200 Time Taken: 548 sec learning rate = 0.0001624 1334it [09:07, 2.44it/s] End of epoch 119 / 200 Time Taken: 547 sec learning rate = 0.0001604 1253it [08:38, 2.42it/s]saving the latest model (epoch 120, total_steps 160000) 1334it [09:12, 2.42it/s] saving the model at the end of epoch 120, iters 160080 End of epoch 120 / 200 Time Taken: 552 sec learning rate = 0.0001584 1334it [09:10, 2.42it/s] End of epoch 121 / 200 Time Taken: 550 sec learning rate = 0.0001564 1334it [09:11, 2.42it/s] End of epoch 122 / 200 Time Taken: 551 sec learning rate = 0.0001545 1334it [09:09, 2.43it/s] End of epoch 123 / 200 Time Taken: 549 sec learning rate = 0.0001525 917it [06:18, 2.43it/s]saving the latest model (epoch 124, total_steps 165000) 1334it [09:10, 2.42it/s] End of epoch 124 / 200 Time Taken: 550 sec learning rate = 0.0001505 1334it [09:11, 2.42it/s] saving the model at the end of epoch 125, iters 166750 End of epoch 125 / 200 Time Taken: 551 sec learning rate = 0.0001485 1334it [09:09, 2.43it/s] End of epoch 126 / 200 Time Taken: 549 sec learning rate = 0.0001465 1334it [09:10, 2.42it/s] End of epoch 127 / 200 Time Taken: 550 sec learning rate = 0.0001446 581it [03:59, 2.42it/s]saving the latest model (epoch 128, total_steps 170000) 1334it [09:10, 2.42it/s] End of epoch 128 / 200 Time Taken: 550 sec learning rate = 0.0001426 1334it [09:10, 2.42it/s] End of epoch 129 / 200 Time Taken: 550 sec learning rate = 0.0001406 1334it [09:11, 2.42it/s] saving the model at the end of epoch 130, iters 173420 End of epoch 130 / 200 Time Taken: 551 sec learning rate = 0.0001386 1334it [09:12, 2.41it/s] End of epoch 131 / 200 Time Taken: 552 sec learning rate = 0.0001366 245it [01:41, 2.40it/s]saving the latest model (epoch 132, total_steps 175000) 1334it [09:08, 2.43it/s] End of epoch 132 / 200 Time Taken: 548 sec learning rate = 0.0001347 1334it [09:07, 2.44it/s] End of epoch 133 / 200 Time Taken: 547 sec learning rate = 0.0001327 1334it [09:07, 2.44it/s] End of epoch 134 / 200 Time Taken: 547 sec learning rate = 0.0001307 1243it [08:29, 2.44it/s]saving the latest model (epoch 135, total_steps 180000) 1334it [09:07, 2.44it/s] saving the model at the end of epoch 135, iters 180090 End of epoch 135 / 200 Time Taken: 547 sec learning rate = 0.0001287 1334it [09:06, 2.44it/s] End of epoch 136 / 200 Time Taken: 546 sec learning rate = 0.0001267 1334it [09:06, 2.44it/s] End of epoch 137 / 200 Time Taken: 546 sec learning rate = 0.0001248 1334it [09:05, 2.44it/s] End of epoch 138 / 200 Time Taken: 545 sec learning rate = 0.0001228 907it [06:11, 2.44it/s]saving the latest model (epoch 139, total_steps 185000) 1334it [09:06, 2.44it/s] End of epoch 139 / 200 Time Taken: 546 sec learning rate = 0.0001208 1334it [09:07, 2.44it/s] saving the model at the end of epoch 140, iters 186760 End of epoch 140 / 200 Time Taken: 547 sec learning rate = 0.0001188 1334it [09:05, 2.45it/s] End of epoch 141 / 200 Time Taken: 545 sec learning rate = 0.0001168 1334it [09:06, 2.44it/s] End of epoch 142 / 200 Time Taken: 546 sec learning rate = 0.0001149 571it [03:54, 2.44it/s]saving the latest model (epoch 143, total_steps 190000) 1334it [09:07, 2.44it/s] End of epoch 143 / 
200 Time Taken: 547 sec learning rate = 0.0001129 1334it [09:06, 2.44it/s] End of epoch 144 / 200 Time Taken: 546 sec learning rate = 0.0001109 1334it [09:07, 2.44it/s] saving the model at the end of epoch 145, iters 193430 End of epoch 145 / 200 Time Taken: 548 sec learning rate = 0.0001089 1334it [09:05, 2.44it/s] End of epoch 146 / 200 Time Taken: 545 sec learning rate = 0.0001069 235it [01:36, 2.43it/s]saving the latest model (epoch 147, total_steps 195000) 1334it [09:06, 2.44it/s] End of epoch 147 / 200 Time Taken: 546 sec learning rate = 0.0001050 1334it [09:07, 2.44it/s] End of epoch 148 / 200 Time Taken: 547 sec learning rate = 0.0001030 1334it [09:05, 2.45it/s] End of epoch 149 / 200 Time Taken: 545 sec learning rate = 0.0001010 1233it [08:26, 2.44it/s]saving the latest model (epoch 150, total_steps 200000) 1334it [09:07, 2.43it/s] saving the model at the end of epoch 150, iters 200100 End of epoch 150 / 200 Time Taken: 548 sec learning rate = 0.0000990 1334it [09:06, 2.44it/s] End of epoch 151 / 200 Time Taken: 546 sec learning rate = 0.0000970 1334it [09:06, 2.44it/s] End of epoch 152 / 200 Time Taken: 546 sec learning rate = 0.0000950 1334it [09:08, 2.43it/s] End of epoch 153 / 200 Time Taken: 548 sec learning rate = 0.0000931 897it [06:07, 2.44it/s]saving the latest model (epoch 154, total_steps 205000) 1334it [09:07, 2.44it/s] End of epoch 154 / 200 Time Taken: 547 sec learning rate = 0.0000911 1334it [09:07, 2.44it/s] saving the model at the end of epoch 155, iters 206770 End of epoch 155 / 200 Time Taken: 548 sec learning rate = 0.0000891 1334it [09:07, 2.44it/s] End of epoch 156 / 200 Time Taken: 547 sec learning rate = 0.0000871 1334it [09:07, 2.44it/s] End of epoch 157 / 200 Time Taken: 547 sec learning rate = 0.0000851 561it [03:50, 2.43it/s]saving the latest model (epoch 158, total_steps 210000) 1334it [09:07, 2.44it/s] End of epoch 158 / 200 Time Taken: 547 sec learning rate = 0.0000832 1334it [09:08, 2.43it/s] End of epoch 159 / 200 Time Taken: 548 sec learning rate = 0.0000812 1334it [09:07, 2.44it/s] saving the model at the end of epoch 160, iters 213440 End of epoch 160 / 200 Time Taken: 547 sec learning rate = 0.0000792 1334it [09:06, 2.44it/s] End of epoch 161 / 200 Time Taken: 546 sec learning rate = 0.0000772 225it [01:32, 2.42it/s]saving the latest model (epoch 162, total_steps 215000) 1334it [09:07, 2.44it/s] End of epoch 162 / 200 Time Taken: 547 sec learning rate = 0.0000752 1334it [09:07, 2.44it/s] End of epoch 163 / 200 Time Taken: 547 sec learning rate = 0.0000733 1334it [09:06, 2.44it/s] End of epoch 164 / 200 Time Taken: 546 sec learning rate = 0.0000713 1223it [08:20, 2.44it/s]saving the latest model (epoch 165, total_steps 220000) 1334it [09:06, 2.44it/s] saving the model at the end of epoch 165, iters 220110 End of epoch 165 / 200 Time Taken: 546 sec learning rate = 0.0000693 1334it [09:07, 2.44it/s] End of epoch 166 / 200 Time Taken: 547 sec learning rate = 0.0000673 1334it [09:06, 2.44it/s] End of epoch 167 / 200 Time Taken: 546 sec learning rate = 0.0000653 1334it [09:07, 2.44it/s] End of epoch 168 / 200 Time Taken: 547 sec learning rate = 0.0000634 887it [06:04, 2.43it/s]saving the latest model (epoch 169, total_steps 225000) 1334it [09:07, 2.44it/s] End of epoch 169 / 200 Time Taken: 547 sec learning rate = 0.0000614 1334it [09:08, 2.43it/s] saving the model at the end of epoch 170, iters 226780 End of epoch 170 / 200 Time Taken: 548 sec learning rate = 0.0000594 1334it [09:07, 2.44it/s] End of epoch 171 / 200 Time Taken: 547 sec learning rate 
= 0.0000574 1334it [09:06, 2.44it/s] End of epoch 172 / 200 Time Taken: 546 sec learning rate = 0.0000554 551it [03:46, 2.44it/s]saving the latest model (epoch 173, total_steps 230000) 1334it [09:08, 2.43it/s] End of epoch 173 / 200 Time Taken: 548 sec learning rate = 0.0000535 1334it [09:07, 2.44it/s] End of epoch 174 / 200 Time Taken: 547 sec learning rate = 0.0000515 1334it [09:07, 2.44it/s] saving the model at the end of epoch 175, iters 233450 End of epoch 175 / 200 Time Taken: 547 sec learning rate = 0.0000495 1334it [09:06, 2.44it/s] End of epoch 176 / 200 Time Taken: 546 sec learning rate = 0.0000475 215it [01:28, 2.43it/s]saving the latest model (epoch 177, total_steps 235000) 1334it [09:07, 2.44it/s] End of epoch 177 / 200 Time Taken: 547 sec learning rate = 0.0000455 1334it [09:09, 2.43it/s] End of epoch 178 / 200 Time Taken: 549 sec learning rate = 0.0000436 1334it [09:06, 2.44it/s] End of epoch 179 / 200 Time Taken: 546 sec learning rate = 0.0000416 1213it [08:17, 2.44it/s]saving the latest model (epoch 180, total_steps 240000) 1334it [09:07, 2.44it/s] saving the model at the end of epoch 180, iters 240120 End of epoch 180 / 200 Time Taken: 548 sec learning rate = 0.0000396 1334it [09:07, 2.44it/s] End of epoch 181 / 200 Time Taken: 547 sec learning rate = 0.0000376 1334it [09:07, 2.44it/s] End of epoch 182 / 200 Time Taken: 547 sec learning rate = 0.0000356 1334it [09:07, 2.44it/s] End of epoch 183 / 200 Time Taken: 547 sec learning rate = 0.0000337 877it [06:00, 2.44it/s]saving the latest model (epoch 184, total_steps 245000) 1334it [09:07, 2.44it/s] End of epoch 184 / 200 Time Taken: 547 sec learning rate = 0.0000317 1334it [09:07, 2.44it/s] saving the model at the end of epoch 185, iters 246790 End of epoch 185 / 200 Time Taken: 547 sec learning rate = 0.0000297 1334it [09:07, 2.44it/s] End of epoch 186 / 200 Time Taken: 547 sec learning rate = 0.0000277 1334it [09:06, 2.44it/s] End of epoch 187 / 200 Time Taken: 546 sec learning rate = 0.0000257 541it [03:42, 2.43it/s]saving the latest model (epoch 188, total_steps 250000) 1334it [09:06, 2.44it/s] End of epoch 188 / 200 Time Taken: 546 sec learning rate = 0.0000238 1334it [09:09, 2.43it/s] End of epoch 189 / 200 Time Taken: 549 sec learning rate = 0.0000218 1334it [09:07, 2.44it/s] saving the model at the end of epoch 190, iters 253460 End of epoch 190 / 200 Time Taken: 548 sec learning rate = 0.0000198 1334it [09:07, 2.44it/s] End of epoch 191 / 200 Time Taken: 547 sec learning rate = 0.0000178 205it [01:24, 2.42it/s]saving the latest model (epoch 192, total_steps 255000) 1334it [09:07, 2.44it/s] End of epoch 192 / 200 Time Taken: 547 sec learning rate = 0.0000158 1334it [09:06, 2.44it/s] End of epoch 193 / 200 Time Taken: 546 sec learning rate = 0.0000139 1334it [09:05, 2.44it/s] End of epoch 194 / 200 Time Taken: 545 sec learning rate = 0.0000119 1203it [08:13, 2.44it/s]saving the latest model (epoch 195, total_steps 260000) 1334it [09:07, 2.44it/s] saving the model at the end of epoch 195, iters 260130 End of epoch 195 / 200 Time Taken: 548 sec learning rate = 0.0000099 1334it [09:08, 2.43it/s] End of epoch 196 / 200 Time Taken: 548 sec learning rate = 0.0000079 1334it [09:05, 2.45it/s] End of epoch 197 / 200 Time Taken: 545 sec learning rate = 0.0000059 1334it [09:07, 2.44it/s] End of epoch 198 / 200 Time Taken: 547 sec learning rate = 0.0000040 867it [05:55, 2.44it/s]saving the latest model (epoch 199, total_steps 265000) 1334it [09:07, 2.44it/s] End of epoch 199 / 200 Time Taken: 547 sec learning rate = 0.0000020 
1334it [09:07, 2.44it/s] saving the model at the end of epoch 200, iters 266800 End of epoch 200 / 200 Time Taken: 548 sec learning rate = 0.0000000
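The printed schedule above matches the 'lambda' lr_policy: the learning rate stays at 0.0002 for the first niter (100) epochs and then decays roughly linearly to zero over the next niter_decay (100) epochs. Below is a sketch of that rule; the exact off-by-one bookkeeping of the repo's LambdaLR scheduler may differ slightly.
# Sketch of the 'lambda' learning-rate policy implied by the log above (scheduler's internal
# epoch counter may be offset by one relative to this).
def lr_for_epoch(epoch, base_lr=2e-4, niter=100, niter_decay=100):
    decay_frac = max(0, epoch - niter) / float(niter_decay)
    return base_lr * (1.0 - decay_frac)

# lr_for_epoch(100) -> 0.0002, lr_for_epoch(150) -> 0.0001, lr_for_epoch(200) -> 0.0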
def show_img(im, ax=None, figsize=None):
    if not ax: fig,ax = plt.subplots(figsize=figsize)
    ax.imshow(im)
    ax.get_xaxis().set_visible(False)
    ax.get_yaxis().set_visible(False)
    return ax
def get_one(data):
    model.set_input(data)
    model.test()
    return list(model.get_current_visuals().values())
model.save(201)
test_ims = []
for i,o in enumerate(dataset):
    if i>10: break
    test_ims.append(get_one(o))
def show_grid(ims):
    fig,axes = plt.subplots(2,3,figsize=(9,6))
    for i,ax in enumerate(axes.flat): show_img(ims[i], ax)
    fig.tight_layout()
for i in range(8): show_grid(test_ims[i])
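Optionally, the panels can be labelled with the names returned by get_current_visuals(); the exact keys and their order (typically real_A, fake_B, rec_A, real_B, fake_A, rec_B, plus idt_* when lambda_identity > 0) depend on the cgan code version, so this helper is a hypothetical variant of show_grid rather than part of the original notebook.
# Hypothetical titled variant of show_grid; assumes get_current_visuals() returns an ordered
# mapping of name -> displayable image array.
def show_grid_titled(data):
    model.set_input(data)
    model.test()
    visuals = model.get_current_visuals()
    fig, axes = plt.subplots(2, 3, figsize=(9, 6))
    for ax, (name, im) in zip(axes.flat, visuals.items()):
        show_img(im, ax)
        ax.set_title(name)
    fig.tight_layout()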
#! wget https://people.eecs.berkeley.edu/~taesung_park/CycleGAN/datasets/horse2zebra.zip