hi,
my network have two branch , seg branch and dt branch
I use outputs of two branch to cmpute KLDivLoss,but I get error:one of the variables needed for gradient computation has been modified by an inplace operation , when I loss.backward
class SegmentationMultiLosses(nn.CrossEntropyLoss):
"""2D Cross Entropy Loss with Multi-L1oss"""
def __init__(self, nclass=-1, weight=None,size_average=True, ignore_index=-1):
super(SegmentationMultiLosses, self).__init__(weight, size_average, ignore_index)
self.nclass = nclass
def forward(self, *inputs):
out, target_img, target_exist, target_dt= tuple(inputs)
out_img, out_dt_img, out_exist = out
loss_seg = super(SegmentationMultiLosses, self).forward(out_img, target_img)
loss_exist = nn.BCELoss().forward(out_exist, target_exist)
loss_dt = nn.MSELoss().forward(out_dt_img, target_dt)
out_dt_norm = self.dt_norm(out_dt_img)
out_seg_sm = F.softmax(out_img, dim=1)
loss_fuse = nn.KLDivLoss().forward(out_seg_sm[:,1:,:,:], out_dt_norm.clone())
print('loss_seg: {} loss_exit: {} loss_dt: {} loss_fuse: {} '.format(loss_seg.item(),
loss_exist.item()*0.1, loss_dt.item(), loss_fuse.item()))
loss = loss_seg + loss_dt + 0.1*loss_exist + loss_fuse
return loss
def dt_norm(self, dt_out):
# clamp a tensor [0, 10] and Normalization
max = 10
min = 0
for i in range(dt_out.shape[0]):
for j in range(dt_out.shape[1]):
dt_out[i,j,:,:] = torch.div(torch.clamp(dt_out[i,j,:,:], min, max), max)
return dt_out
dt_norm function is an inplace operation ?
thanks