Binary Classification Problematic Accuracy/Loss

Hi, I’m trying to create a binary classification model to classify heart diseases. Here is the code:

class Model0(nn.Module):
    def __init__(self):
        super(Model0, self).__init__()
        self.layer_1 = nn.Linear(in_features=13, out_features=64)
        self.layer_2 = nn.Linear(in_features=64, out_features=32)
        self.layer_3 = nn.Linear(in_features=32, out_features=1)
        self.relu = nn.ReLU()
        self.sigmoid = nn.Sigmoid()
        self.dropout2 = nn.Dropout(0.1)
        self.dropout1 = nn.Dropout(0.1)
            
    def forward(self, x):
        x = self.relu(self.layer_1(x))
        x = self.dropout1(x)
        x = self.relu(self.layer_2(x))
        x = self.dropout2(x)
        x = self.layer_3(x)
        return x

        
model_0 = Model0().to(device)

loss_f = nn.BCELoss()
optimizer = torch.optim.Adam(params= model_0.parameters(), lr=0.01)      
        
epochs = np.arange(1, 301,1)
        
X_train_t, y_train_t = X_train_t.to(device), y_train_t.to(device)
X_test_t, y_test_t = X_test_t.to(device), y_test_t.to(device)
X_test_scaled_tensor, y_test_tensor = X_test_scaled_tensor.to(device), y_test_tensor.to(device)


w = []


for epoch in epochs:
    model_0.train()
    for data, labels in train_loader:
        data, labels = data.to(device), labels.to(device)
        """X_batch = X_train_t[start:start+30]
        y_batch = y_train_t[start:start+30]"""
        y_train_pred= model_0(data)
        y_train_pred = torch.round(torch.sigmoid(y_train_pred))
        loss = loss_f(y_train_pred, labels)
        acc =  accuracy_fn(y_true=labels, y_pred=y_train_pred) 
        train_accr.append(acc)
        optimizer.zero_grad()
        loss.backward()
        train_loss_val.append(loss.detach().numpy())
        optimizer.step()
        if epoch % 10 == 0:
            print(f"Epoch: {epoch} | Loss: {loss:.5f}, Accuracy: {acc:.2f}%")
        
model_0.eval()  
with torch.no_grad():
    for data, labels in test_loader:
        data, labels = data.to(device), labels.to(device)
        X_test_sq = model_0(data)
        X_test_sq_pred = torch.round(torch.sigmoid(X_test_sq))
        test_loss = loss_f(X_test_sq,labels)
        test_loss_val.append(test_loss)
        test_acc = accuracy_fn(y_true=labels,
                                           y_pred=X_test_sq_pred)
        test_accr.append(test_acc)
        w.append(test_acc)
        if epoch % 10 == 0:
            print(f"Epoch: {epoch} | Test loss: {test_loss:.5f}, Test acc: {test_acc:.2f}%")

And here is the first output of the train_loader (just in case for the control if there is any problem with the data)


train_loader = DataLoader(dataset=train_dataset, batch_size=32, shuffle=True)
test_loader = DataLoader(dataset=test_dataset, batch_size=16, shuffle=True)
tensor([[-1.2373, -1.3944,  0.9528, -0.4989, -0.6298, -0.3820,  0.8701,  0.6544,
         -0.6872, -0.7476, -0.6780, -0.6763, -0.5489],
        [ 0.6161,  0.7171, -0.9802,  0.5611,  0.8387, -0.3820, -1.0316,  0.8747,
         -0.6872,  0.1706, -0.6780,  1.4321,  1.1375],
        [ 1.3792,  0.7171, -0.9802, -0.6167, -0.3361, -0.3820, -1.0316, -0.9316,
          1.4552,  1.4561, -0.6780,  1.4321,  1.1375],
        [-0.8012,  0.7171, -0.9802, -1.2056,  0.5083, -0.3820, -1.0316, -1.4162,
          1.4552, -0.0130, -0.6780,  0.3779, -0.5489],
        [-0.3651,  0.7171,  0.9528, -2.1479, -0.3728, -0.3820,  0.8701,  0.1698,
          1.4552, -0.9312,  0.9553,  0.3779,  1.1375],
        [-0.0381,  0.7171, -0.9802, -1.2056, -0.7583, -0.3820, -1.0316, -1.8568,
          1.4552, -0.9312, -0.6780,  0.3779, -0.5489],
        [-0.1471,  0.7171, -0.9802, -0.4400,  0.6367, -0.3820,  0.8701, -2.4295,
          1.4552,  0.9052, -0.6780,  1.4321,  1.1375],
        [-1.0193,  0.7171, -0.0137, -0.1456,  1.1140, -0.3820, -1.0316,  0.8747,
         -0.6872, -0.9312,  0.9553, -0.6763, -0.5489],
        [-0.3651, -1.3944,  0.9528,  0.5611,  1.1140, -0.3820, -1.0316, -0.3589,
         -0.6872,  0.4461,  0.9553,  0.3779, -0.5489],
        [ 0.3980,  0.7171, -0.0137, -0.6167,  0.6735, -0.3820, -1.0316,  0.4341,
         -0.6872,  0.7216, -0.6780, -0.6763, -0.5489],
        [-0.2561,  0.7171, -0.0137,  0.2078, -0.8501, -0.3820,  0.8701,  0.3460,
         -0.6872, -0.1966,  0.9553,  0.3779, -0.5489],
        [ 0.5071,  0.7171, -0.9802,  0.5611, -1.2906, -0.3820,  0.8701,  0.5222,
          1.4552, -0.9312,  0.9553,  0.3779,  1.1375],
        [ 1.1612,  0.7171, -0.9802, -1.2056,  0.0126, -0.3820, -1.0316,  0.3460,
         -0.6872, -0.3803,  0.9553,  1.4321, -2.2353],
        [ 0.9431, -1.3944, -0.9802, -1.3234,  0.3981, -0.3820,  0.8701,  0.8306,
          1.4552,  0.7216, -0.6780,  1.4321, -0.5489],
        [ 0.2890, -1.3944, -0.9802,  0.5611, -0.1159, -0.3820,  0.8701, -1.1960,
          1.4552, -0.7476, -0.6780, -0.6763,  1.1375],
        [-0.2561,  0.7171,  0.9528,  2.4457, -0.8868,  2.6176,  0.8701,  0.5222,
         -0.6872, -0.4721,  0.9553, -0.6763,  1.1375],
        [ 1.0522,  0.7171,  1.9193, -1.2056, -0.6665, -0.3820, -1.0316, -0.2708,
          1.4552,  0.7216, -0.6780, -0.6763, -0.5489],
        [ 0.2890,  0.7171, -0.9802, -1.2056, -0.8501, -0.3820,  0.8701, -1.0638,
          1.4552,  0.4461, -0.6780, -0.6763, -2.2353],
        [-0.5832, -1.3944, -0.9802, -0.0278,  0.3981, -0.3820,  0.8701,  0.5663,
         -0.6872, -0.9312,  0.9553, -0.6763, -0.5489],
        [ 0.8341, -1.3944, -0.9802,  0.5611,  2.6926, -0.3820, -1.0316,  0.3020,
         -0.6872,  0.1706, -0.6780, -0.6763, -0.5489],
        [-0.4741,  0.7171, -0.9802,  0.7967, -0.8684, -0.3820, -1.0316, -1.0638,
          1.4552, -0.1048, -0.6780, -0.6763,  1.1375],
        [ 1.1612,  0.7171, -0.9802, -0.6167, -1.2906, -0.3820,  0.8701, -0.4470,
         -0.6872, -0.5639,  0.9553, -0.6763,  1.1375],
        [ 1.0522,  0.7171,  0.9528, -0.3222,  1.1324, -0.3820,  0.8701, -0.8435,
          1.4552,  0.7216, -0.6780, -0.6763,  1.1375],
        [ 0.3980, -1.3944, -0.9802, -0.0278, -0.9235, -0.3820,  0.8701, -0.8435,
         -0.6872, -0.3803, -0.6780, -0.6763, -0.5489],
        [ 0.7251,  0.7171,  0.9528,  1.1500, -0.0791,  2.6176,  0.8701, -0.5792,
          1.4552, -0.0130, -0.6780, -0.6763, -0.5489],
        [-0.8012,  0.7171, -0.9802, -1.0878, -0.7950, -0.3820,  0.8701, -0.3148,
         -0.6872, -0.8394,  0.9553, -0.6763, -0.5489],
        [ 0.3980,  0.7171,  0.9528,  0.5611, -0.6665,  2.6176, -1.0316,  0.6544,
         -0.6872, -0.9312,  0.9553, -0.6763, -0.5489],
        [ 0.3980, -1.3944,  1.9193,  1.1500,  0.6551,  2.6176, -1.0316,  0.5222,
         -0.6872, -0.0130,  0.9553, -0.6763, -0.5489],
        [-1.3463,  0.7171,  0.9528, -0.6167, -0.1342,  2.6176,  0.8701,  1.9320,
         -0.6872, -0.1966, -2.3113, -0.6763,  1.1375],
        [ 0.8341, -1.3944, -0.9802, -0.3811, -0.7032, -0.3820,  0.8701,  0.5663,
         -0.6872, -0.9312,  0.9553, -0.6763, -0.5489],
        [ 1.3792,  0.7171,  0.9528,  1.2678, -0.6482, -0.3820, -1.0316, -0.0064,
         -0.6872, -0.1966, -0.6780, -0.6763,  1.1375],
        [ 2.1424, -1.3944, -0.0137, -0.6167,  0.3981, -0.3820, -1.0316, -1.2841,
          1.4552, -0.7476,  0.9553,  0.3779, -0.5489]])
tensor([[1.],
        [0.],
        [0.],
        [0.],
        [1.],
        [0.],
        [0.],
        [1.],
        [1.],
        [0.],
        [1.],
        [0.],
        [0.],
        [0.],
        [0.],
        [1.],
        [1.],
        [1.],
        [1.],
        [1.],
        [0.],
        [1.],
        [0.],
        [1.],
        [1.],
        [1.],
        [1.],
        [1.],
        [1.],
        [1.],
        [0.],
        [1.]])

However, the loss and accuracy values are quite problematic as you can see here:

Epoch: 200 | Loss: 81.25000, Accuracy: 18.75%
Epoch: 200 | Loss: 60.00000, Accuracy: 40.00%
Epoch: 210 | Loss: 50.00000, Accuracy: 50.00%
Epoch: 210 | Loss: 50.00000, Accuracy: 50.00%
Epoch: 210 | Loss: 68.75000, Accuracy: 31.25%
Epoch: 210 | Loss: 40.62500, Accuracy: 59.38%
Epoch: 210 | Loss: 75.00000, Accuracy: 25.00%
Epoch: 210 | Loss: 56.25000, Accuracy: 43.75%
Epoch: 210 | Loss: 50.00000, Accuracy: 50.00%
Epoch: 220 | Loss: 56.25000, Accuracy: 43.75%
Epoch: 220 | Loss: 65.62500, Accuracy: 34.38%
Epoch: 220 | Loss: 46.87500, Accuracy: 53.12%
Epoch: 220 | Loss: 59.37500, Accuracy: 40.62%
Epoch: 220 | Loss: 50.00000, Accuracy: 50.00%
Epoch: 220 | Loss: 53.12500, Accuracy: 46.88%
Epoch: 220 | Loss: 65.00000, Accuracy: 35.00%
Epoch: 230 | Loss: 50.00000, Accuracy: 50.00%
Epoch: 230 | Loss: 53.12500, Accuracy: 46.88%
Epoch: 230 | Loss: 56.25000, Accuracy: 43.75%
Epoch: 230 | Loss: 68.75000, Accuracy: 31.25%
Epoch: 230 | Loss: 59.37500, Accuracy: 40.62%
Epoch: 230 | Loss: 37.50000, Accuracy: 62.50%
Epoch: 230 | Loss: 70.00000, Accuracy: 30.00%
Epoch: 240 | Loss: 71.87500, Accuracy: 28.12%
Epoch: 240 | Loss: 53.12500, Accuracy: 46.88%
Epoch: 240 | Loss: 59.37500, Accuracy: 40.62%
Epoch: 240 | Loss: 62.50000, Accuracy: 37.50%
Epoch: 240 | Loss: 62.50000, Accuracy: 37.50%
Epoch: 240 | Loss: 71.87500, Accuracy: 28.12%
Epoch: 240 | Loss: 65.00000, Accuracy: 35.00%
Epoch: 250 | Loss: 59.37500, Accuracy: 40.62%
Epoch: 250 | Loss: 59.37500, Accuracy: 40.62%
Epoch: 250 | Loss: 65.62500, Accuracy: 34.38%
Epoch: 250 | Loss: 59.37500, Accuracy: 40.62%
Epoch: 250 | Loss: 59.37500, Accuracy: 40.62%
Epoch: 250 | Loss: 59.37500, Accuracy: 40.62%
Epoch: 250 | Loss: 50.00000, Accuracy: 50.00%
Epoch: 260 | Loss: 65.62500, Accuracy: 34.38%
Epoch: 260 | Loss: 71.87500, Accuracy: 28.12%
Epoch: 260 | Loss: 62.50000, Accuracy: 37.50%
Epoch: 260 | Loss: 68.75000, Accuracy: 31.25%
Epoch: 260 | Loss: 56.25000, Accuracy: 43.75%
Epoch: 260 | Loss: 65.62500, Accuracy: 34.38%
Epoch: 260 | Loss: 45.00000, Accuracy: 55.00%
Epoch: 270 | Loss: 56.25000, Accuracy: 43.75%
Epoch: 270 | Loss: 59.37500, Accuracy: 40.62%
Epoch: 270 | Loss: 43.75000, Accuracy: 56.25%
Epoch: 270 | Loss: 75.00000, Accuracy: 25.00%
Epoch: 270 | Loss: 53.12500, Accuracy: 46.88%
Epoch: 270 | Loss: 65.62500, Accuracy: 34.38%
Epoch: 270 | Loss: 65.00000, Accuracy: 35.00%
Epoch: 280 | Loss: 68.75000, Accuracy: 31.25%
Epoch: 280 | Loss: 46.87500, Accuracy: 53.12%
Epoch: 280 | Loss: 50.00000, Accuracy: 50.00%
Epoch: 280 | Loss: 71.87500, Accuracy: 28.12%
Epoch: 280 | Loss: 53.12500, Accuracy: 46.88%
Epoch: 280 | Loss: 62.50000, Accuracy: 37.50%
Epoch: 280 | Loss: 65.00000, Accuracy: 35.00%
Epoch: 290 | Loss: 65.62500, Accuracy: 34.38%
Epoch: 290 | Loss: 65.62500, Accuracy: 34.38%
Epoch: 290 | Loss: 75.00000, Accuracy: 25.00%
Epoch: 290 | Loss: 65.62500, Accuracy: 34.38%
Epoch: 290 | Loss: 46.87500, Accuracy: 53.12%
Epoch: 290 | Loss: 62.50000, Accuracy: 37.50%
Epoch: 290 | Loss: 60.00000, Accuracy: 40.00%
Epoch: 300 | Loss: 68.75000, Accuracy: 31.25%
Epoch: 300 | Loss: 62.50000, Accuracy: 37.50%
Epoch: 300 | Loss: 65.62500, Accuracy: 34.38%
Epoch: 300 | Loss: 71.87500, Accuracy: 28.12%
Epoch: 300 | Loss: 59.37500, Accuracy: 40.62%
Epoch: 300 | Loss: 65.62500, Accuracy: 34.38%
Epoch: 300 | Loss: 75.00000, Accuracy: 25.00%
Epoch: 300 | Test loss: 0.70348, Test acc: 37.50%
Epoch: 300 | Test loss: 0.71140, Test acc: 25.00%
Epoch: 300 | Test loss: 0.70649, Test acc: 37.50%
Epoch: 300 | Test loss: 0.71591, Test acc: 18.75%
Epoch: 300 | Test loss: 0.69719, Test acc: 50.00%
Epoch: 300 | Test loss: 0.69873, Test acc: 45.45%

Can you help me to find what the problem might is?