---
title: Learner
keywords: fastai
sidebar: home_sidebar
summary: "This contains fastai Learner extensions."
description: "This contains fastai Learner extensions."
nb_path: "nbs/052_learner.ipynb"
---
{% raw %}
{% endraw %} {% raw %}
{% endraw %} {% raw %}

Learner.show_batch[source]

Learner.show_batch(**kwargs)

{% endraw %} {% raw %}
{% endraw %} {% raw %}

Learner.remove_all_cbs[source]

Learner.remove_all_cbs(max_iters=10)
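
For example (a minimal sketch), you might detach every callback before serializing a Learner manually; `max_iters` bounds the number of removal passes in case callbacks re-register themselves:

learn.remove_all_cbs(max_iters=10)
print(learn.cbs)  # expected to show few or no remaining callbacks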

{% endraw %} {% raw %}
{% endraw %} {% raw %}

Learner.one_batch[source]

Learner.one_batch(i, b)

{% endraw %} {% raw %}
{% endraw %} {% raw %}

Learner.save_all[source]

Learner.save_all(path='export', dls_fname='dls', model_fname='model', learner_fname='learner', verbose=False)

{% endraw %} {% raw %}

load_all[source]

load_all(path='export', dls_fname='dls', model_fname='model', learner_fname='learner', device=None, pickle_module=pickle, verbose=False)
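
A minimal round-trip sketch (the folder and file names below are simply the defaults; `save_all`/`load_all` are also demonstrated at the end of this page):

learn.save_all(path='export', dls_fname='dls', model_fname='model', learner_fname='learner')  # persist dls, model and learner
learn = load_all(path='export', dls_fname='dls', model_fname='model', learner_fname='learner')  # restore them later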

{% endraw %} {% raw %}
{% endraw %} {% raw %}

Recorder.plot_metrics[source]

Recorder.plot_metrics(nrows=None, ncols=None, figsize=None, final_losses=True, perc=0.5, imsize=3, suptitle=None, sharex=False, sharey=False, squeeze=True, subplot_kw=None, gridspec_kw=None)

{% endraw %} {% raw %}

Learner.plot_metrics[source]

Learner.plot_metrics(nrows=1, ncols=1, figsize=None, imsize=3, suptitle=None, sharex=False, sharey=False, squeeze=True, subplot_kw=None, gridspec_kw=None)
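
`Learner.plot_metrics` appears to be a thin wrapper around `Recorder.plot_metrics`; a minimal sketch, where `final_losses`/`perc` are assumed to add panels zoomed on the last fraction of training iterations:

learn.plot_metrics()  # plot recorded train/valid losses and metrics after training
learn.recorder.plot_metrics(final_losses=True, perc=0.5)  # same plot via the recorder, with explicit final-loss panels (assumed behavior)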

{% endraw %} {% raw %}
{% endraw %} {% raw %}

Learner.show_probas[source]

Learner.show_probas(figsize=(6, 6), ds_idx=1, dl=None, one_batch=False, max_n=None, nrows=1, ncols=1, imsize=3, suptitle=None, sharex=False, sharey=False, squeeze=True, subplot_kw=None, gridspec_kw=None)

{% endraw %} {% raw %}
{% endraw %} {% raw %}

ts_learner[source]

ts_learner(dls, arch=None, c_in=None, c_out=None, seq_len=None, d=None, splitter=trainable_params, loss_func=None, opt_func=Adam, lr=0.001, cbs=None, metrics=None, path=None, model_dir='models', wd=None, wd_bn_bias=False, train_bn=True, moms=(0.95, 0.85, 0.95), device=None, verbose=False, pretrained=False, weights_path=None, exclude_head=True, cut=-1, init=None)
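
Besides building a learner from dataloaders and an architecture (as in the example further down this page), the `pretrained`, `weights_path` and `exclude_head` arguments are intended for transfer learning. A hedged sketch, where the weights file path is purely illustrative:

# Hedged sketch: build a learner that loads previously saved weights; the .pth path is illustrative.
# exclude_head=True is assumed to skip loading the final layer so a new head matches the number of classes.
learn = ts_learner(dls, FCNPlus, pretrained=True,
                   weights_path='models/pretrained_weights.pth', exclude_head=True)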

{% endraw %} {% raw %}
{% endraw %} {% raw %}

tsimage_learner[source]

tsimage_learner(dls, arch=None, pretrained=False, loss_func=None, opt_func=Adam, lr=0.001, cbs=None, metrics=None, path=None, model_dir='models', wd=None, wd_bn_bias=False, train_bn=True, moms=(0.95, 0.85, 0.95), c_in=None, c_out=None, device=None, verbose=False, init=None, p=0.0, n_out=1000, stem_szs=(32, 32, 64), widen=1.0, sa=False, act_cls=ReLU, ndim=2, ks=3, stride=2, groups=1, reduction=None, nh1=None, nh2=None, dw=False, g2=1, sym=False, norm_type=<NormType.Batch: 1>, pool=AvgPool, pool_first=True, padding=None, bias=None, bn_1st=True, transpose=False, xtra=None, bias_std=0.01, dilation:Union[int, Tuple[int, int]]=1, padding_mode:str='zeros', dtype=None)
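
`tsimage_learner` expects dataloaders whose batches have been converted to images. A hedged sketch, assuming the `TSToGADF` batch transform from `tsai.data.image` and fastai's `xresnet34` are importable in your environment:

from tsai.data.image import TSToGADF
from fastai.vision.all import xresnet34
dls_img = get_ts_dls(X, y, splits=splits, tfms=tfms, batch_tfms=[TSToGADF(size=224)])  # encode each series as a Gramian Angular Difference Field image
learn_img = tsimage_learner(dls_img, xresnet34)
learn_img.fit_one_cycle(1)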

{% endraw %} {% raw %}
{% endraw %} {% raw %}

Learner.decoder[source]

Learner.decoder(o)
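
A hedged sketch, assuming `decoder` maps raw model outputs back to labels from the dataloaders' vocab (presumably what `get_X_preds` relies on for its decoded output):

raw_preds, _ = learn.get_preds(ds_idx=1)  # raw outputs for the validation set
decoded = learn.decoder(raw_preds)        # assumed to return the corresponding class labels
decoded[:5]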

{% endraw %} {% raw %}
{% endraw %} {% raw %}

Learner.get_X_preds[source]

Learner.get_X_preds(X, y=None, bs=64, with_input=False, with_decoded=True, with_loss=False, save_preds=None, save_targs=None, concat_dim=0)
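
In addition to the probabilities, targets and decoded predictions shown in the examples below, `with_loss=True` is expected to append the per-sample loss (labels are then required), and `save_preds`/`save_targs` take file paths to stream results to disk instead of keeping them in memory. A hedged sketch:

probas, targets, preds, losses = learn.get_X_preds(X[:10], y[:10], with_loss=True)  # per-sample losses appended (assumed return shape)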

{% endraw %} {% raw %}
{% endraw %} {% raw %}
from tsai.data.all import *
from tsai.data.core import *
from tsai.models.FCNPlus import *
dsid = 'OliveOil'
X, y, splits = get_UCR_data(dsid, verbose=True, split_data=False)
tfms  = [None, [Categorize()]]
dls = get_ts_dls(X, y, splits=splits, tfms=tfms)
learn = ts_learner(dls, FCNPlus)
for p in learn.model.parameters():
    p.requires_grad=False
test_eq(count_parameters(learn.model), 0)
learn.freeze()
test_eq(count_parameters(learn.model), 1540)
learn.unfreeze()
test_eq(count_parameters(learn.model), 264580)
Dataset: OliveOil
X      : (60, 1, 570)
y      : (60,)
splits : (#30) [0,1,2,3,4,5,6,7,8,9...] (#30) [30,31,32,33,34,35,36,37,38,39...] 

{% endraw %} {% raw %}
learn.show_batch();
{% endraw %} {% raw %}
learn.fit_one_cycle(2, lr_max=1e-3)
| epoch | train_loss | valid_loss | time  |
|-------|------------|------------|-------|
| 0     | 1.386450   | 1.391969   | 00:03 |
| 1     | 1.383805   | 1.372495   | 00:03 |
{% endraw %} {% raw %}
dsid = 'OliveOil'
X, y, splits = get_UCR_data(dsid, split_data=False)
tfms  = [None, [Categorize()]]
dls = get_ts_dls(X, y, tfms=tfms, splits=splits)
learn = ts_learner(dls, FCNPlus, metrics=accuracy)
learn.fit_one_cycle(2)
learn.plot_metrics()
learn.show_probas()
| epoch | train_loss | valid_loss | accuracy | time  |
|-------|------------|------------|----------|-------|
| 0     | 1.371040   | 1.358402   | 0.300000 | 00:03 |
| 1     | 1.368754   | 1.349877   | 0.300000 | 00:03 |
{% endraw %} {% raw %}
learn.save_all()
del learn
learn = load_all()
{% endraw %} {% raw %}
test_probas, test_targets, test_preds = learn.get_X_preds(X[0:10], with_decoded=True)
test_probas, test_targets, test_preds
(tensor([[0.2389, 0.3308, 0.1760, 0.2543],
         [0.2389, 0.3309, 0.1759, 0.2543],
         [0.2389, 0.3309, 0.1760, 0.2543],
         [0.2388, 0.3308, 0.1760, 0.2543],
         [0.2389, 0.3309, 0.1759, 0.2543],
         [0.2389, 0.3308, 0.1760, 0.2542],
         [0.2389, 0.3309, 0.1760, 0.2543],
         [0.2389, 0.3309, 0.1759, 0.2543],
         [0.2390, 0.3308, 0.1760, 0.2542],
         [0.2389, 0.3308, 0.1760, 0.2543]]),
 None,
 (#10) ['2','2','2','2','2','2','2','2','2','2'])
{% endraw %} {% raw %}
test_probas2, test_targets2, test_preds2 = learn.get_X_preds(X[0:10], y[0:10], with_decoded=True)
test_probas2, test_targets2, test_preds2
(tensor([[0.2389, 0.3308, 0.1760, 0.2543],
         [0.2389, 0.3309, 0.1759, 0.2543],
         [0.2389, 0.3309, 0.1760, 0.2543],
         [0.2388, 0.3308, 0.1760, 0.2543],
         [0.2389, 0.3309, 0.1759, 0.2543],
         [0.2389, 0.3308, 0.1760, 0.2542],
         [0.2389, 0.3309, 0.1760, 0.2543],
         [0.2389, 0.3309, 0.1759, 0.2543],
         [0.2390, 0.3308, 0.1760, 0.2542],
         [0.2389, 0.3308, 0.1760, 0.2543]]),
 TensorCategory([0, 0, 0, 0, 0, 1, 1, 1, 1, 1]),
 (#10) ['2','2','2','2','2','2','2','2','2','2'])
{% endraw %} {% raw %}
test_eq(test_probas, test_probas2)
test_eq(test_preds, test_preds2)
{% endraw %} {% raw %}
learn.fit_one_cycle(1, lr_max=1e-3)
| epoch | train_loss | valid_loss | accuracy | time  |
|-------|------------|------------|----------|-------|
| 0     | 1.336296   | 1.348905   | 0.300000 | 00:03 |
{% endraw %}