I have a problem with multi-class classification. I built the model below, but the test accuracy stays constant at about 4%.
import torch
import torch.nn as nn
from torch.autograd import Variable

input_size = 13
hidden1_size = 1024
hidden2_size = 1024
hidden3_size = 1024
hidden4_size = 1024
hidden5_size = 1024
output_size = 1976  # number of classes
class DNN(nn.Module):
    def __init__(self, input_size, hidden1_size, hidden2_size, hidden3_size,
                 hidden4_size, hidden5_size, output_size):
        super(DNN, self).__init__()
        # five fully connected hidden layers, each followed by a sigmoid
        self.fc1 = nn.Linear(input_size, hidden1_size)
        self.sig1 = nn.Sigmoid()
        self.fc2 = nn.Linear(hidden1_size, hidden2_size)
        self.sig2 = nn.Sigmoid()
        self.fc3 = nn.Linear(hidden2_size, hidden3_size)
        self.sig3 = nn.Sigmoid()
        self.fc4 = nn.Linear(hidden3_size, hidden4_size)
        self.sig4 = nn.Sigmoid()
        self.fc5 = nn.Linear(hidden4_size, hidden5_size)
        self.sig5 = nn.Sigmoid()
        # output layer returns raw logits (no softmax here)
        self.fc6 = nn.Linear(hidden5_size, output_size)

    def forward(self, x):
        out = self.fc1(x)
        out = self.sig1(out)
        out = self.fc2(out)
        out = self.sig2(out)
        out = self.fc3(out)
        out = self.sig3(out)
        out = self.fc4(out)
        out = self.sig4(out)
        out = self.fc5(out)
        out = self.sig5(out)
        out = self.fc6(out)
        return out
model = DNN(input_size, hidden1_size, hidden2_size, hidden3_size,
            hidden4_size, hidden5_size, output_size)
criterion = nn.CrossEntropyLoss()
learning_rate = 0.008
optimizer = torch.optim.Adam(model.parameters(), lr=learning_rate)
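# Note: nn.CrossEntropyLoss expects raw logits of shape (batch, output_size)
# and integer class labels of shape (batch,) as a LongTensor with values in
# [0, output_size - 1]; the targets must not be one-hot encoded.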
for epoch in range(1, 50):
    model.train()
    for i, (X_train, y_train) in enumerate(train_loader):
        optimizer.zero_grad()
        outputs = model(Variable(X_train))
        loss = criterion(outputs, Variable(y_train))
        print('Iter %d/%d --> loss %f' % (i, len(train_loader), loss.item()))
        loss.backward()
        optimizer.step()

    # evaluate on the test set after each epoch
    correct = 0
    total = 0
    print('test')
    model.eval()
    for X_test, y_test in test_loader:
        out = model(Variable(X_test)).detach()
        pred = out.max(dim=1)[1]  # index of the largest logit = predicted class
        total += y_test.size(0)
        correct += (pred.squeeze() == y_test).sum().item()
    accuracy = 100 * correct / total
    print('epoch: {}. Accuracy: {}'.format(epoch, accuracy))
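For reference, random guessing over 1976 classes would give roughly 0.05% accuracy, so a flat 4% makes me suspect the network is collapsing onto one frequent class. Below is a minimal sketch I would use to check the label distribution; it assumes train_loader yields (features, labels) batches with integer class indices:

from collections import Counter

# Tally how often each class appears in the training labels
# (assumes each batch is an (X, y) pair with y holding class indices).
counts = Counter()
for _, y in train_loader:
    counts.update(y.tolist())

top_class, top_count = counts.most_common(1)[0]
total_labels = sum(counts.values())
print('most frequent class: %d (%.2f%% of all labels)'
      % (top_class, 100.0 * top_count / total_labels))

If the most frequent class covers about 4% of the labels, that would match the accuracy I am seeing.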