PyTorch DataLoader labels return an array instead of a number

When I loop over the dataloader, it gives me an array for the label instead of a number.
In my Dataset class, the `__getitem__` method returns an image and an int (the 'Type' label).

class Dataset(torch.utils.data.Dataset):
    """Custom dataset that loads images from *directory* into an internal
    DataFrame (``self.df``), optionally adding horizontally-flipped copies
    of class-0 images as data augmentation via :meth:`rotate`.
    """

    def __init__(self, label_list, directory):
        # label_list: DataFrame-like with 'id' (filename) and 'type' (label) columns.
        self.label_list = label_list
        self.directory = directory
        # New dataframe that will hold the loaded (and augmented) images.
        self.df = pd.DataFrame(columns=['Image', 'Type'])

    def __len__(self):
        # NOTE(review): length comes from label_list, but samples are read from
        # self.df, which rotate() grows beyond len(label_list) — confirm intent.
        return len(self.label_list)

    def __getitem__(self, index):
        # Convert the stored HWC image to a CHW float tensor, then grayscale it.
        X = self.df.iloc[index]['Image']
        X = torch.from_numpy(X.astype(np.float64))
        X = torch.moveaxis(X, 2, 0)  # HWC -> CHW
        X = transforms.functional.rgb_to_grayscale(X)
        y = self.df.iloc[index]['Type']

        return X, y

    # data augmentation + df to new df
    def rotate(self):
        # Rows whose label is 0 — only these get a flipped copy.
        zero_rows = self.label_list[self.label_list['type'] == 0]
        it = 0  # running row index into self.df

        # Flip each image left/right and add the flipped copy to the dataframe.
        for index, rows in zero_rows.iterrows():
            img = cv2.imread(self.directory + rows['id'])
            flipLR = np.fliplr(img)  # change image from left to right

            # BUG FIX: the original code stored `img` here, so the flipped
            # image was computed but never used; store the flipped copy.
            self.df.loc[it] = [flipLR, rows['type']]
            it += 1

        # add former df to new df
        self.new_df(it)

    # old df to add new df with loaded img
    def new_df(self, it):
        # Load every image from label_list and append it starting at row `it`.
        for index, rows in self.label_list.iterrows():
            img = cv2.imread(self.directory + rows['id'])
            self.df.loc[it] = [img, rows['type']]  # add value to new df
            it += 1

# Shared DataLoader configuration.
batch_size = 32
params = dict(batch_size=batch_size, shuffle=True)

# Generators
# Training data: rotate() performs the augmentation and loads every image.
training_set = Dataset(train, "img/")
training_set.rotate()
train_loader = torch.utils.data.DataLoader(training_set, **params)

# Test data: no augmentation — just load the images starting at row 0.
test_set = Dataset(test, "img/")
test_set.new_df(0)
test_loader = torch.utils.data.DataLoader(test_set, **params)

However, inside the loop `for images, labels in test_loader:`, `labels` is an array and not a number.

def test():
    """Evaluate the module-level `model` on `test_loader` and print accuracy.

    Prints the number of correct predictions, the total number of samples,
    and the accuracy as a percentage. Returns nothing.
    """
    correct = 0
    total = 0
    pred = []
    # Gradients are not needed for evaluation; no_grad saves memory and time.
    with torch.no_grad():
        # Iterate through test dataset
        for images, labels in test_loader:
            outputs = model(images.float())

            # Predicted class = index of the maximum logit along dim 1.
            _, predicted = torch.max(outputs.data, 1)

            # Total number of labels in this batch.
            total += labels.size(0)

            # Total correct predictions.
            pred.append(predicted)
            # BUG FIX: .item() converts the 0-dim tensor from .sum() to a
            # plain Python int, so `correct` and `accuracy` print as numbers
            # rather than tensor(...) objects.
            correct += (predicted == labels).sum().item()

    accuracy = 100 * correct / total
    # Print Loss
    print('correct : ', str(correct))
    print('total : ', str(total))
    print('Accuracy: {}'.format(accuracy))

Why does it give me an array?

The DataLoader returns batch_size samples and uses its internal collate_fn to create the batch of samples. By default it should create a tensor containing 32 integer values for your labels.
I’m not sure if I understand your question correctly but the DataLoader won’t return 32 separate integers in case this is your expectation.

You perfectly understood my question. Thank you for your answer.