Is this dice loss function correct?

I am trying to use Dice loss to measure the distance between the ground-truth mask and the predicted mask,
code:

# inputs (y_pred): predicted logits, shape [batch_size, num_classes, w, h]
# targets (y_true): integer class labels, shape [batch_size, 1, w, h]
class DiceLoss(torch.nn.Module):
    """Multi-class Dice loss.

    Compares softmax class probabilities against a one-hot encoding of the
    integer label map and returns ``1 - dice`` as a scalar tensor.

    Note: ``weight`` and ``size_average`` are accepted for interface
    compatibility but are currently unused.
    """

    def __init__(self, weight=None, size_average=True):
        super(DiceLoss, self).__init__()

    def forward(self, inputs, targets, num_classes, smooth=1):
        """Compute the Dice loss.

        Args:
            inputs: raw logits, shape [batch_size, num_classes, w, h].
            targets: integer labels (long), shape [batch_size, 1, w, h],
                values in [0, num_classes).
            num_classes: number of classes (must match inputs.shape[1]).
            smooth: additive smoothing term that avoids division by zero
                and softens the gradient for empty masks.

        Returns:
            Scalar tensor: 1 - dice coefficient.
        """
        # Softmax (not sigmoid): the target is one-hot over the class
        # dimension, so per-pixel probabilities must sum to 1 across classes.
        inputs = torch.softmax(inputs, dim=1)

        targets = targets.squeeze(1)  # [batch_size, 1, w, h] -> [batch_size, w, h]
        targets = torch.nn.functional.one_hot(targets, num_classes)  # [batch_size, w, h, num_classes]
        targets = targets.permute(0, 3, 1, 2)  # -> [batch_size, num_classes, w, h]
        targets = targets.contiguous()

        # Flatten both tensors and compute a single global dice coefficient.
        inputs = inputs.view(-1)
        targets = targets.view(-1)
        intersection = (inputs * targets).sum()
        dice = (2. * intersection + smooth) / (inputs.sum() + targets.sum() + smooth)
        return 1 - dice
dice_loss = DiceLoss()


# Toy example: a single 2x2 image with two classes.
batch_size = 1
class_num = 2
w = h = 2

# A: all-ones logits for every class (y_pred stand-in).
A = torch.ones([batch_size, class_num, w, h])
# B: label map of all zeros, i.e. every pixel belongs to class 0 (y_true).
B = torch.zeros([batch_size, 1, w, h]).long()

dice_loss(A, B, num_classes=2)