# IPython magics: auto-reload edited modules so code changes in the fastai
# source are picked up without restarting the kernel.
%reload_ext autoreload
%autoreload 2
# fastai v1 star-import brings in vision API (ImageDataBunch, Learner,
# rand_pad, flip_lr, untar_data, URLs, torch, accuracy, cifar_stats, ...).
from fastai.vision import *
from fastai.vision.models.wrn import wrn_22
# Let cuDNN benchmark conv algorithms; a win here because every batch has
# the same fixed input size (32x32 CIFAR images).
torch.backends.cudnn.benchmark = True
# Download (if needed) and extract the CIFAR-10 dataset; returns the local path.
path = untar_data(URLs.CIFAR)
path
# Cell output: where untar_data placed the dataset.
PosixPath('/home/ubuntu/.fastai/data/cifar10')
# Augmentations: (train transforms, valid transforms). Train gets 4-pixel
# reflection-pad + random 32x32 crop and a 50% horizontal flip; validation
# gets none.
augmentations = ([*rand_pad(4, 32), flip_lr(p=0.5)], [])
# Build the data pipeline: folder layout with 'test' as the validation set,
# batch size 512, normalized with the standard CIFAR channel stats.
bunch = ImageDataBunch.from_folder(path, valid='test', ds_tfms=augmentations, bs=512)
data = bunch.normalize(cifar_stats)
# WideResNet-22 in mixed precision, tracking accuracy.
learn = Learner(data, wrn_22(), metrics=accuracy).to_fp16()
# One-cycle schedule: 30 epochs, max LR 3e-3 (starting at 3e-4 via
# div_factor=10), weight decay 0.4, LR warm-up for the first half of training.
learn.fit_one_cycle(30, 3e-3, wd=0.4, div_factor=10, pct_start=0.5)
epoch | train_loss | valid_loss | accuracy |
---|---|---|---|
1 | 1.493729 | 1.288911 | 0.532400 |
2 | 1.161237 | 1.103286 | 0.604100 |
3 | 0.958353 | 0.996172 | 0.649300 |
4 | 0.829848 | 1.120279 | 0.638500 |
5 | 0.716744 | 0.724809 | 0.752300 |
6 | 0.634061 | 1.139240 | 0.626800 |
7 | 0.574845 | 1.627489 | 0.506100 |
8 | 0.531848 | 0.912567 | 0.712200 |
9 | 0.489270 | 0.791987 | 0.745500 |
10 | 0.459794 | 0.646239 | 0.782000 |
11 | 0.431601 | 0.640238 | 0.789400 |
12 | 0.402780 | 0.648663 | 0.793200 |
13 | 0.387314 | 0.614063 | 0.793000 |
14 | 0.366800 | 0.594612 | 0.813600 |
15 | 0.338351 | 0.620742 | 0.804600 |
16 | 0.324927 | 0.470762 | 0.841500 |
17 | 0.302258 | 0.468217 | 0.844900 |
18 | 0.286116 | 0.421791 | 0.859000 |
19 | 0.257466 | 0.428825 | 0.859200 |
20 | 0.233121 | 0.343100 | 0.887100 |
21 | 0.205734 | 0.342273 | 0.887500 |
22 | 0.176312 | 0.318532 | 0.896700 |
23 | 0.144774 | 0.328396 | 0.896100 |
24 | 0.119999 | 0.287829 | 0.910800 |
25 | 0.087010 | 0.232755 | 0.928600 |
26 | 0.060723 | 0.236310 | 0.931400 |
27 | 0.042571 | 0.207955 | 0.942000 |
28 | 0.027802 | 0.217585 | 0.938900 |
29 | 0.020010 | 0.209865 | 0.943000 |
30 | 0.016231 | 0.209546 | 0.943000 |
Training the same WideResNet-22 again, this time with mixup data augmentation:
# Fresh WideResNet-22 on the same data, again in mixed precision, with the
# mixup callback blending pairs of training examples and their labels.
base_learner = Learner(data, wrn_22(), metrics=accuracy)
learn = base_learner.to_fp16().mixup()
# Shorter one-cycle run (24 epochs) and lighter weight decay (0.2): mixup
# already regularizes heavily, so less explicit regularization is needed.
learn.fit_one_cycle(24, 3e-3, wd=0.2, div_factor=10, pct_start=0.5)
epoch | train_loss | valid_loss | accuracy |
---|---|---|---|
1 | 1.806122 | 1.413667 | 0.504500 |
2 | 1.592665 | 1.189260 | 0.590700 |
3 | 1.461559 | 1.018693 | 0.655400 |
4 | 1.370229 | 0.874307 | 0.712100 |
5 | 1.296808 | 0.913873 | 0.704000 |
6 | 1.250895 | 0.836409 | 0.733900 |
7 | 1.209640 | 0.736776 | 0.778600 |
8 | 1.186605 | 0.753798 | 0.767200 |
9 | 1.166516 | 0.757842 | 0.767700 |
10 | 1.137516 | 0.699450 | 0.806500 |
11 | 1.120571 | 0.736078 | 0.780600 |
12 | 1.103785 | 0.909942 | 0.710700 |
13 | 1.073971 | 0.530825 | 0.856600 |
14 | 1.055455 | 0.583879 | 0.831600 |
15 | 1.035860 | 0.509721 | 0.868300 |
16 | 1.017207 | 0.510995 | 0.867800 |
17 | 0.995223 | 0.446647 | 0.889100 |
18 | 0.962532 | 0.378901 | 0.904300 |
19 | 0.940812 | 0.352570 | 0.917800 |
20 | 0.922071 | 0.332144 | 0.928500 |
21 | 0.899262 | 0.326830 | 0.932000 |
22 | 0.880337 | 0.312892 | 0.936600 |
23 | 0.874789 | 0.306469 | 0.940000 |
24 | 0.865873 | 0.305611 | 0.939200 |