Adding a softmax layer with cross-entropy loss

If I use this method to extract the features with loss = CrossEntropy, should I add a softmax layer this way?


import torch
import torch.nn as nn
import torch.nn.functional as F


def get_probabilities(outputs):
    # Convert raw logits to class probabilities (intended for inference)
    return F.softmax(outputs, dim=1)


class First(nn.Module):
    def __init__(self, modelA, modelB, num_classes=5):
        super(First, self).__init__()
        self.modelA = modelA
        self.modelB = modelB
        # Replace both classification heads so the backbones return features
        self.modelA.heads = nn.Identity()
        self.modelB.head = nn.Identity()
        # Combined feature size: 1024 (modelA) + 768 (modelB)
        self.classifier = nn.Linear(1024 + 768, num_classes)

    def forward(self, x):
        x1 = self.modelA(x.clone())
        x1 = x1.view(x1.size(0), -1)
        x2 = self.modelB(x)
        x2 = x2.view(x2.size(0), -1)
        # Concatenate both feature vectors and classify the fused features
        x = torch.cat((x1, x2), dim=1)
        x = self.classifier(F.relu(x))
        return x
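
For reference, a quick shape check of the fused model. The dummy backbones below are made-up stand-ins for the real modelA/modelB, returning 1024- and 768-dim feature vectors to match the classifier's input size; the `heads`/`head` attributes only mimic the names that `First` replaces:

import torch
import torch.nn as nn


class DummyBackbone(nn.Module):
    # Hypothetical stand-in for a real feature extractor
    def __init__(self, out_dim):
        super().__init__()
        self.fc = nn.Linear(3 * 32 * 32, out_dim)
        self.heads = nn.Identity()
        self.head = nn.Identity()

    def forward(self, x):
        return self.head(self.heads(self.fc(x.flatten(1))))


model = First(DummyBackbone(1024), DummyBackbone(768), num_classes=5)
out = model(torch.randn(2, 3, 32, 32))
print(out.shape)  # torch.Size([2, 5]) -- raw logits, no softmax applied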

Under what conditions do I need to apply the softmax layer, and where?

nn.CrossEntropyLoss expects the unnormalized logits (see CrossEntropyLoss — PyTorch 2.5 documentation) and applies the (log-)softmax internally for you, so you should not apply softmax yourself before the loss.
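
To illustrate, here is a minimal sketch of the two cases. The model, batch size, and tensors are assumptions made up for the example (a plain nn.Linear stands in for your `First` model):

import torch
import torch.nn as nn
import torch.nn.functional as F

model = nn.Linear(1024 + 768, 5)
criterion = nn.CrossEntropyLoss()

features = torch.randn(8, 1024 + 768)  # batch of 8 fused feature vectors
targets = torch.randint(0, 5, (8,))    # integer class labels in [0, 5)

# Training: pass the raw logits straight into the loss.
# nn.CrossEntropyLoss applies log_softmax internally.
logits = model(features)
loss = criterion(logits, targets)
loss.backward()

# Inference: apply softmax only here, when you actually want
# probabilities (e.g. for reporting or thresholding).
with torch.no_grad():
    probs = F.softmax(model(features), dim=1)
    preds = probs.argmax(dim=1)

Note that if you only need the predicted class, the softmax is optional even at inference time: argmax over the raw logits gives the same result, since softmax is monotonic.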