Yes — DataLoader doesn't impose any constraint on the number of values your
Dataset's __getitem__ returns; it simply collates whatever each sample yields,
as seen here:
class MyDataset(Dataset):
    """Toy map-style dataset yielding single random feature vectors.

    Each sample is a 1-D tensor of shape ``(num_features,)`` drawn once
    from a standard normal distribution at construction time.

    Args:
        num_samples: Number of samples in the dataset (default 100,
            matching the original hard-coded value).
        num_features: Feature dimension of each sample (default 1).
    """

    def __init__(self, num_samples: int = 100, num_features: int = 1) -> None:
        # Pre-generate all data so __getitem__ is a cheap index lookup.
        self.data = torch.randn(num_samples, num_features)

    def __getitem__(self, index: int) -> torch.Tensor:
        # Returns a single sample of shape (num_features,).
        return self.data[index]

    def __len__(self) -> int:
        return len(self.data)
def main() -> None:
    """Iterate a DataLoader over MyDataset and print each batch's shape."""
    dataset = MyDataset()
    loader = DataLoader(
        dataset,
        batch_size=5,
        num_workers=2,
        shuffle=True,
    )
    for data in loader:
        # 100 samples of shape (1,) batched by 5 -> each batch is (5, 1).
        print(data.shape)


# The guard is required here: num_workers=2 spawns worker processes that
# re-import this module. Without it, spawn-based platforms (Windows/macOS)
# would recursively re-execute the loader loop and crash.
if __name__ == "__main__":
    main()