--- title: Losses keywords: fastai sidebar: home_sidebar summary: "This contains losses not available in fastai or PyTorch." description: "This contains losses not available in fastai or PyTorch." nb_path: "nbs/050_losses.ipynb" ---
# Sanity-check HuberLoss against PyTorch's built-in equivalent, then run LogCoshLoss.
preds = torch.rand(8, 3, 10)
labels = torch.randn(8, 3, 10)
# With delta=1, HuberLoss should match nn.SmoothL1Loss exactly.
test_close(HuberLoss(delta=1)(preds, labels), nn.SmoothL1Loss()(preds, labels))
LogCoshLoss()(preds, labels)
# Compare a plain L1 loss with its MaskedLossWrapper version when the
# targets contain NaNs (the wrapper is meant to handle missing targets).
preds = torch.rand(8, 3, 10)
labels = torch.randn(8, 3, 10)
labels[labels > .8] = np.nan  # inject missing values into the targets
nn.L1Loss()(preds, labels), MaskedLossWrapper(nn.L1Loss())(preds, labels)
# CenterLoss / CenterPlusLoss demo: build a softmax-normalized random batch
# and use each row's argmax as its pseudo-label.
c_in = 10
feats = torch.rand(64, c_in).to(device=default_device())
feats = F.softmax(feats, dim=1)
pseudo_labels = feats.max(dim=1).indices
CenterLoss(c_in).to(feats.device)(feats, pseudo_labels), CenterPlusLoss(LabelSmoothingCrossEntropyFlat(), c_in).to(feats.device)(feats, pseudo_labels)
CenterPlusLoss(LabelSmoothingCrossEntropyFlat(), c_in)
# FocalLoss demo on a random two-class batch of 16 samples.
logits = torch.normal(0, 2, (16, 2)).to(device=default_device())
ys = torch.randint(0, 2, (16,)).to(device=default_device())
FocalLoss()(logits, ys)
# TweedieLoss demo on flat 1-D predictions and targets.
c_in = 10  # NOTE(review): unused in this snippet — kept for parity with the original
preds = torch.rand(64).to(device=default_device())
actuals = torch.rand(64).to(device=default_device())
TweedieLoss().to(preds.device)(preds, actuals)