I used the color map you suggested here, but I am getting different outputs. My target has shape [3, 256, 256] and there are 21 classes. The function produces a different number of class indices for each target (see the printed shapes below), and when I use CrossEntropyLoss this error is raised: RuntimeError: CUDA error: device-side assert triggered
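As far as I understand, nn.CrossEntropyLoss expects raw logits of shape [N, C, H, W] and a target of shape [N, H, W] with long class indices in the range [0, C-1]. This small standalone example (dummy tensors, not my real model or data) runs on the CPU and shows that range requirement:

import torch
import torch.nn as nn

n_classes = 21
criterion = nn.CrossEntropyLoss()

# Dummy stand-ins for one batch; my real model and loader are not shown here
logits = torch.randn(4, n_classes, 256, 256)         # raw scores, [N, C, H, W]
masks = torch.randint(0, n_classes, (4, 256, 256))   # class indices in [0, 20]

loss = criterion(logits, masks)   # fine: every index is < 21

masks[0, 0, 0] = 26               # one out-of-range index
# criterion(logits, masks)        # IndexError on the CPU; on the GPU this is the
#                                 # "device-side assert triggered" error

This is the conversion function I am using (based on your snippet):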
import numpy as np
import torch
import matplotlib.cm


def Convert_gts(n_classes, target):
    nb_classes = n_classes - 1  # 20 classes + background
    # Build one RGB color per foreground class from the 'jet' colormap
    idx = np.linspace(0., 1., nb_classes)
    cmap = matplotlib.cm.get_cmap('jet')
    rgb = cmap(idx, bytes=True)[:, :3]  # drop the alpha channel

    h, w = 256, 256
    target = target.reshape(h * w, 3)
    # 256*256 is not divisible by 20 (65536 / 20 = 3276.8); repeating 3276 times
    # leaves the last 16 pixels with their original values
    rgb = rgb.repeat((h * w) // nb_classes, 0)
    target[:rgb.shape[0]] = rgb
    target = target.reshape(h, w, 3)

    target = torch.from_numpy(target)
    # Collect the unique colors actually present and map each one to an index
    colors = torch.unique(target.view(-1, 3), dim=0).numpy()
    target = target.permute(2, 0, 1).contiguous()
    mapping = {tuple(c): t for c, t in zip(colors.tolist(), range(len(colors)))}

    mask = torch.zeros(h, w, dtype=torch.long)
    for k in mapping:
        # Pixels where all three channels match this color get its class index
        idx = (target == torch.tensor(k, dtype=torch.uint8).unsqueeze(1).unsqueeze(2))
        validx = (idx.sum(0) == 3)  # check that all channels match
        mask[validx] = torch.tensor(mapping[k], dtype=torch.long)
    return mask
Target.Shape: torch.Size([3, 256, 256])
Colors.Shape: (21, 3)
Num_Class: 21
Mask.Shape: torch.Size([256, 256])
------------------------------------------
Target.Shape: torch.Size([3, 256, 256])
Colors.Shape: (27, 3)
Num_Class: 21
Mask.Shape: torch.Size([256, 256])
------------------------------------------
Target.Shape: torch.Size([3, 256, 256])
Colors.Shape: (25, 3)
Num_Class: 21
Mask.Shape: torch.Size([256, 256])
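Since the mapping is built from torch.unique, a target with 27 unique colors gets indices 0..26, which is above the 0..20 range that CrossEntropyLoss allows for 21 classes; I think that is what triggers the device-side assert. This check (a minimal sketch; target stands for a single ground-truth image loaded as above) confirms it on the CPU with a readable message:

# Sanity check on one converted mask before computing the loss.
# `target` is a placeholder for a single ground-truth image, as above.
n_classes = 21
mask = Convert_gts(n_classes, target)
print(mask.min().item(), mask.max().item())
assert 0 <= mask.min() and mask.max() < n_classes, (
    f"mask contains index {mask.max().item()}, but CrossEntropyLoss with "
    f"{n_classes} classes only accepts indices 0..{n_classes - 1}"
)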