Training loss remains relatively stable. On the test set, the model achieves 50% accuracy, which is equivalent to random guessing since there are only 2 classes. I have already tried increasing/decreasing model complexity, adjusting hyperparameters, and data augmentation — essentially anything to get the model to underfit or overfit the data. I can’t tell whether the problem lies in the neural network or in the dataset itself.
class CNN(nn.Module):
    """Three-stage convolutional binary classifier.

    Each stage is conv -> batchnorm -> ReLU -> 2x2 max-pool; the result is
    flattened and passed through three fully-connected layers to 2 logits.

    NOTE(bugfix): the original code constructed bn1/bn2/bn3 in __init__ but
    never called them in forward(), so the network ran entirely without
    normalization while carrying unused parameters. They are now applied
    between each conv and its ReLU.

    Expected input: (N, 1, H, W) with H=124, W=92 (or any size that reduces
    to 12x8 after three (conv k=5, p=0) + pool stages) so that the flattened
    feature count matches fc1's 128*12*8 input.
    """

    def __init__(self):
        super(CNN, self).__init__()
        # Valid (padding=0) convolutions: each shrinks H and W by 4.
        self.conv1 = nn.Conv2d(1, 32, kernel_size=5, padding=0)
        self.bn1 = nn.BatchNorm2d(32)
        self.conv2 = nn.Conv2d(32, 64, kernel_size=5, padding=0)
        self.bn2 = nn.BatchNorm2d(64)
        self.conv3 = nn.Conv2d(64, 128, kernel_size=5, padding=0)
        self.bn3 = nn.BatchNorm2d(128)
        # Shared 2x2 pooling layer (stateless, so one instance is reused).
        self.pool = nn.MaxPool2d(2, 2)
        # 128 channels * 12 * 8 spatial positions after the three stages.
        self.fc1 = nn.Linear(128 * 12 * 8, 512)
        self.fc2 = nn.Linear(512, 256)
        self.fc3 = nn.Linear(256, 2)

    def forward(self, x):
        """Return raw class logits of shape (N, 2); no softmax applied."""
        # BatchNorm is applied between each convolution and its ReLU.
        x = self.pool(torch.relu(self.bn1(self.conv1(x))))
        x = self.pool(torch.relu(self.bn2(self.conv2(x))))
        x = self.pool(torch.relu(self.bn3(self.conv3(x))))
        x = torch.flatten(x, 1)  # keep batch dim, flatten C*H*W
        x = torch.relu(self.fc1(x))
        x = torch.relu(self.fc2(x))
        x = self.fc3(x)  # logits; pair with nn.CrossEntropyLoss
        return x
num_epochs = 10
# Standard supervised training loop: one optimizer step per mini-batch,
# tracking mean batch loss and running accuracy over the epoch.
for epoch in range(1, num_epochs + 1):
    model.train()  # enable training-mode behavior (dropout/batchnorm)
    epoch_loss = 0.0
    n_correct = 0
    n_seen = 0
    for images, labels in train_loader:
        optimizer.zero_grad()  # clear gradients accumulated last step
        logits = model(images)
        batch_loss = criterion(logits, labels)
        batch_loss.backward()
        optimizer.step()
        epoch_loss += batch_loss.item()
        # Predicted class = index of the max logit per sample.
        preds = logits.argmax(dim=1)
        n_seen += labels.size(0)
        n_correct += (preds == labels).sum().item()
    accuracy_pct = 100 * n_correct / n_seen
    print(f"Epoch {epoch}, Training Loss: {epoch_loss/len(train_loader)}, Training Accuracy: {accuracy_pct}%")
Epoch 1, Training Loss: 0.6989821430408594, Training Accuracy: 52.94117647058823%
Epoch 2, Training Loss: 0.6789375381036238, Training Accuracy: 58.8235294117647%
Epoch 3, Training Loss: 0.6709084140531945, Training Accuracy: 88.23529411764706%
Epoch 4, Training Loss: 0.6927016901247429, Training Accuracy: 52.94117647058823%
Epoch 5, Training Loss: 0.6819337732864149, Training Accuracy: 64.70588235294117%
Epoch 6, Training Loss: 0.6968633731206259, Training Accuracy: 47.05882352941177%
Epoch 7, Training Loss: 0.6873575990850275, Training Accuracy: 52.94117647058823%
Epoch 8, Training Loss: 0.6847923795382181, Training Accuracy: 58.8235294117647%
Epoch 9, Training Loss: 0.683509703838464, Training Accuracy: 64.70588235294117%
Epoch 10, Training Loss: 0.6756617174004064, Training Accuracy: 52.94117647058823%
Accuracy on test set: 50.0%