Hello @ptrblck,
I am using a custom contrastive loss function as
def loss_contrastive(euclidean_distance, label_batch):
margin = 100
loss = torch.mean( (label_batch) * torch.pow(euclidean_distance, 2) +
(1-label_batch) * torch.pow(torch.clamp(margin - euclidean_distance, min=0.0), 2))
However, I get this error
---------------------------------------------------------------------------
TypeError Traceback (most recent call last)
<ipython-input-95-9478fc9e762e> in <module>
----> 1 interp = Interpretation.from_learner(learn)
~/anaconda3/envs/pytorch/lib/python3.6/site-packages/fastai/train.py in from_learner(cls, learn, ds_type, activ)
158 def from_learner(cls, learn: Learner, ds_type:DatasetType=DatasetType.Valid, activ:nn.Module=None):
159 "Gets preds, y_true, losses to construct base class from a learner"
--> 160 preds_res = learn.get_preds(ds_type=ds_type, activ=activ, with_loss=True)
161 return cls(learn, *preds_res)
162
~/anaconda3/envs/pytorch/lib/python3.6/site-packages/fastai/basic_train.py in get_preds(self, ds_type, activ, with_loss, n_batch, pbar)
339 callbacks = [cb(self) for cb in self.callback_fns + listify(defaults.extra_callback_fns)] + listify(self.callbacks)
340 return get_preds(self.model, self.dl(ds_type), cb_handler=CallbackHandler(callbacks),
--> 341 activ=activ, loss_func=lf, n_batch=n_batch, pbar=pbar)
342
343 def pred_batch(self, ds_type:DatasetType=DatasetType.Valid, batch:Tuple=None, reconstruct:bool=False,
~/anaconda3/envs/pytorch/lib/python3.6/site-packages/fastai/basic_train.py in get_preds(model, dl, pbar, cb_handler, activ, loss_func, n_batch)
44 zip(*validate(model, dl, cb_handler=cb_handler, pbar=pbar, average=False, n_batch=n_batch))]
45 if loss_func is not None:
---> 46 with NoneReduceOnCPU(loss_func) as lf: res.append(lf(res[0], res[1]))
47 if activ is not None: res[0] = activ(res[0])
48 return res
TypeError: loss_contrastive() got an unexpected keyword argument 'reduction'
How to make the loss function compatible?