from fastai.basic_train import *
from fastai.gen_doc.nbdoc import *
from fastai.vision import *
from fastai.distributed import *

show_doc(Learner, title_level=2)

path = untar_data(URLs.MNIST_SAMPLE)
data = ImageDataBunch.from_folder(path)
learn = cnn_learner(data, models.resnet18, metrics=accuracy)

show_doc(Learner.lr_find)
learn.lr_find()
learn.recorder.plot()

show_doc(Learner.fit)
learn.fit(1)

show_doc(Learner.fit_one_cycle)
learn.fit_one_cycle(1)

show_doc(Learner.predict)
learn.data.train_ds[0]
data = learn.data.train_ds[0][0]
data
pred = learn.predict(data)
pred
learn.data.classes
probs = pred[2]
learn.data.valid_ds[0][0]

show_doc(Learner.get_preds)
learn.data.batch_size
preds = learn.get_preds()
preds
preds[0]
preds[1]
preds[1][0]
len(learn.data.valid_ds)
len(preds[0]), len(preds[1])
learn.get_preds(ds_type=DatasetType.Train)
learn.get_preds(with_loss=True)

show_doc(Learner.validate)
str(learn.metrics)
learn.validate()
learn.validate(learn.data.valid_dl)
learn.validate(learn.data.train_dl)

show_doc(Learner.show_results)
learn.show_results()
learn.show_results(ds_type=DatasetType.Train)

show_doc(Learner.pred_batch)
learn.data.batch_size
preds = learn.pred_batch()
len(preds)
preds[:10]
item = learn.data.train_ds[0][0]
item
batch = learn.data.one_item(item)
batch
learn.pred_batch(batch=batch)

show_doc(Learner.interpret, full_name='interpret')
jekyll_note('This function only works in the vision application.')
show_doc(Learner.summary)
show_doc(Learner.TTA, full_name='TTA')
show_doc(Learner.clip_grad)
show_doc(Learner.to_fp16)
show_doc(Learner.to_fp32)
show_doc(Learner.to_distributed, full_name='to_distributed')
show_doc(Learner.to_parallel, full_name='to_parallel')

# creates 3 layer groups
learn.split(lambda m: (m[0][6], m[1]))
# only randomly initialized head now trainable
learn.freeze()
learn.fit_one_cycle(1)
# all layers now trainable
learn.unfreeze()
# optionally, separate LR and WD for each group
learn.fit_one_cycle(1, max_lr=(1e-4, 1e-3, 1e-2), wd=(1e-4, 1e-4, 1e-1))

show_doc(Learner.lr_range)
learn.lr_range(slice(1e-5, 1e-3)), learn.lr_range(slice(1e-3))
show_doc(Learner.unfreeze)
show_doc(Learner.freeze)
show_doc(Learner.freeze_to)
show_doc(Learner.split)

show_doc(Learner.save)
learn.save("trained_model")
learn.save("trained_model", return_path=True)
show_doc(Learner.load)
learn = learn.load("trained_model")
show_doc(Learner.export)
learn.export()
learn.export('trained_model.pkl')
path = learn.path
path
show_doc(load_learner)
learn = load_learner(path)
learn = load_learner(path, 'trained_model.pkl')

show_doc(Learner.purge)
show_doc(Learner.destroy)
show_doc(Learner.init)
show_doc(Learner.mixup)
show_doc(Learner.backward)
show_doc(Learner.create_opt)
show_doc(Learner.dl)
learn.dl()
learn.dl(DatasetType.Train)

show_doc(Recorder, title_level=2)
show_doc(Recorder.plot)
path = untar_data(URLs.MNIST_SAMPLE)
data = ImageDataBunch.from_folder(path)
learn = cnn_learner(data, models.resnet18, metrics=accuracy)
learn.lr_find()
learn.recorder.plot()

show_doc(Recorder.plot_losses)
learn.fit_one_cycle(5)
learn.recorder.plot_losses()

show_doc(Recorder.plot_lr)
learn.recorder.plot_lr()
learn.recorder.plot_lr(show_moms=True)

show_doc(Recorder.plot_metrics)
learn.recorder.plot_metrics()

show_doc(Recorder.on_backward_begin)
show_doc(Recorder.on_batch_begin)
show_doc(Recorder.on_epoch_end)
show_doc(Recorder.on_train_begin)
show_doc(Recorder.add_metric_names)
show_doc(Recorder.format_stats)

show_doc(fit)
show_doc(train_epoch)
show_doc(validate)
show_doc(get_preds)
show_doc(loss_batch)
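# A minimal sketch (not part of the original cells above) tying together the
# Learner.predict examples: predict returns a (Category, index tensor,
# probability tensor) tuple, so the index can be looked up in
# learn.data.classes to recover the class name and its probability.
item = learn.data.train_ds[0][0]
pred_class, pred_idx, probs = learn.predict(item)
pred_class                            # predicted Category object
learn.data.classes[pred_idx.item()]   # same class, looked up by index
probs[pred_idx.item()]                # probability assigned to that class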
show_doc(LearnerCallback, title_level=3)
show_doc(RecordOnCPU, title_level=3)
show_doc(Learner.tta_only)
show_doc(Learner.TTA)
show_doc(RecordOnCPU.on_batch_begin)
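# A minimal sketch of subclassing LearnerCallback (documented above). The name
# PrintEpochCallback and what it prints are illustrative only, not part of the
# library: it hooks on_epoch_end to report the smoothed training loss.
class PrintEpochCallback(LearnerCallback):
    def on_epoch_end(self, epoch, smooth_loss, **kwargs):
        print(f'epoch {epoch}: smoothed training loss {float(smooth_loss):.4f}')

# Passing the class in callback_fns lets the Learner instantiate it with
# itself as the first argument, as LearnerCallback expects.
learn = cnn_learner(data, models.resnet18, metrics=accuracy,
                    callback_fns=[PrintEpochCallback])
learn.fit(1)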