Hi @Zhengtian,
I just reused the loss from that project and trained the model with my own script on a private dataset, so loss.py is the only part of the code I checked. It worked correctly for me, so the problem is probably somewhere else.
I rewrote loss.py as an nn.Module; I hope it's helpful for you.
import torch.nn.functional as F
import torch.nn as nn


class CrossEntropy2d(nn.Module):

    def __init__(self, size_average=True, ignore_label=255):
        super(CrossEntropy2d, self).__init__()
        self.size_average = size_average
        self.ignore_label = ignore_label

    def forward(self, predict, target, weight=None):
        """
        Args:
            predict: (n, c, h, w)
            target: (n, h, w)
            weight (Tensor, optional): a manual rescaling weight given to each class.
                If given, has to be a Tensor of size "nclasses"
        """
        assert not target.requires_grad
        assert predict.dim() == 4
        assert target.dim() == 3
        assert predict.size(0) == target.size(0), "{0} vs {1} ".format(predict.size(0), target.size(0))
        assert predict.size(2) == target.size(1), "{0} vs {1} ".format(predict.size(2), target.size(1))
        assert predict.size(3) == target.size(2), "{0} vs {1} ".format(predict.size(3), target.size(2))
        n, c, h, w = predict.size()
        # keep only pixels that are valid, i.e. not marked with the ignore label
        target_mask = (target >= 0) & (target != self.ignore_label)
        target = target[target_mask]
        # (n, c, h, w) -> (n, h, w, c) so the class dimension is last,
        # then gather the logits of the valid pixels into a (num_valid, c) tensor
        predict = predict.transpose(1, 2).transpose(2, 3).contiguous()
        predict = predict[target_mask.view(n, h, w, 1).repeat(1, 1, 1, c)].view(-1, c)
        loss = F.cross_entropy(predict, target, weight=weight, size_average=self.size_average)
        return loss
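
In case it helps, here is a minimal sketch of how I call it. The batch size, number of classes, and crop size below are just made-up values for illustration:

    import torch

    criterion = CrossEntropy2d(ignore_label=255)

    # hypothetical shapes: batch of 2, 21 classes, 321x321 crops
    logits = torch.randn(2, 21, 321, 321, requires_grad=True)        # (n, c, h, w) raw scores
    labels = torch.randint(0, 21, (2, 321, 321), dtype=torch.long)   # (n, h, w) class indices
    labels[0, :10, :10] = 255                                        # some ignored pixels

    loss = criterion(logits, labels)
    loss.backward()
    print(loss.item())
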
Best Regards