Mini-batch gradient descent: bad accuracy and loss

Hey, I’m trying mini-batch gradient descent on the popular Iris dataset, but I can’t get the model’s accuracy above 75-80%. I’m also not sure whether I’m calculating the loss and the accuracy correctly. Any suggestions on how to improve my code, or mistakes I’m making, are appreciated.

from torch.utils.data import DataLoader

batch_size = 10
# note: both loaders currently draw from the same dataset object
train_loader = DataLoader(dataset, batch_size=batch_size, shuffle=True)
test_loader = DataLoader(dataset, batch_size=batch_size, shuffle=True)
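
For context, dataset, model, loss and optim are set up roughly like this (simplified sketch; the layer sizes and learning rate shown here are placeholders):

import torch
import torch.nn as nn

# dataset is a torch Dataset over the iris CSV: each item is
# (a tensor of the 4 flower measurements, the species name as a string)

# placeholder model: one linear layer mapping the 4 features to 3 class logits
model = nn.Linear(4, 3)

# loss criterion and optimizer (learning rate is a placeholder)
loss = nn.CrossEntropyLoss()
optim = torch.optim.SGD(model.parameters(), lr=0.01)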

Training loop:

n_iters = 1000
steps = n_iters // 10
LOSS = []
for epochs in range(n_iters):
    for i, (inputs, labels) in enumerate(train_loader):
        out = model(inputs)
        # map the species strings to class-index tensors
        train_labels = transform_label(labels)
        l = loss(out, train_labels)
        l.backward()
        # update weights
        optim.step()
        optim.zero_grad()
    # record only the loss of the last batch in this epoch
    LOSS.append(l.item())
    if epochs % steps == 0:
        # prints the running average of all losses recorded so far
        print(f"\n epoch: {epochs + steps}/{n_iters}, loss: {sum(LOSS)/len(LOSS)}")
        # (optional per-step print)
        #if i % 1 == 0:
        #    print(f" steps: {i+1}, loss: {l.item()}")

Calculate accuracy:

def accuracy(model, test_loader):
    sum_acc = 0
    # map the species strings to class indices 0, 1, 2
    def transform_label(label_data):
        data = []
        for i in label_data:
            if i == "Iris-setosa":
                data.append(torch.tensor([0]))
            if i == "Iris-versicolor":
                data.append(torch.tensor([1]))
            if i == "Iris-virginica":
                data.append(torch.tensor([2]))
        return torch.stack(data)

    for i, (X_test, test_labels) in enumerate(test_loader):
        test_labels = transform_label(test_labels)
        x_label_pre = model(X_test)
        # predicted class = index of the largest logit
        _, x_label_pre_hat = torch.max(x_label_pre, 1)
        idx = 0
        number_correct = 0
        while idx < len(X_test):
            if x_label_pre_hat[idx].item() == test_labels[idx].item():
                number_correct += 1
            idx += 1
        accuracy_per_batch = (number_correct / len(X_test)) * 100
        print(f"accuracy of batch {i}:\n{accuracy_per_batch}%")
        sum_acc += accuracy_per_batch
    print(f"\ntotal accuracy of model {(sum_acc / len(test_loader)):.2f}%")
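
The function is then called after training, roughly like this (wrapping the call in torch.no_grad() so no gradients are tracked during evaluation):

model.eval()              # switch off any training-only behaviour (dropout, batch norm)
with torch.no_grad():     # no gradients needed while evaluating
    accuracy(model, test_loader)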