My code is…
import torch
from torchvision import transforms
from torchvision.datasets import ImageFolder
from sklearn.model_selection import KFold

batch_size = 16
transform = transforms.Compose([
    transforms.Resize((299, 299)),
    transforms.ToTensor(),
    transforms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225]),
])
dataset = ImageFolder('.data/', transform=transform)

kf = KFold(n_splits=5, shuffle=True)
for i, (train_index, test_index) in enumerate(kf.split(dataset)):
    trainloader = torch.utils.data.DataLoader(train_index, batch_size=batch_size, shuffle=True, num_workers=0, pin_memory=False)
    testloader = torch.utils.data.DataLoader(test_index, batch_size=batch_size, shuffle=True, num_workers=0, pin_memory=False)
    print('Fold : {}, train : {}, test : {}'.format(i + 1, len(trainloader.dataset), len(testloader.dataset)))

    for batch_idx, (data, target) in enumerate(trainloader):
        print('Train Batch idx : {}, data shape : {}, target shape : {}'.format(batch_idx, data.shape, target.shape))
This error occurred:
Fold : 1, train : 579, test : 145
---------------------------------------------------------------------------
ValueError Traceback (most recent call last)
<ipython-input-6-0fe2bfb82b09> in <module>
16 print('Fold : {}, len train : {}, len test : {}'.format(i+1, len(trainloader.dataset), len(testloader.dataset)))
17
---> 18 for batch_idx, (data, target) in enumerate(trainloader):
19 print('Train Batch idx : {}, data shape : {}, target shape : {}'.format(batch_idx, data.shape, target.shape))
ValueError: too many values to unpack (expected 2)
I don't know how to handle train_index and test_index. My guess is that I am passing the raw index arrays straight to DataLoader, so each batch is a single tensor of indices rather than (data, target) pairs, which would explain the unpacking error. Could anyone help me?
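
For what it's worth, here is a sketch of what I think the fold loop should look like, wrapping the fold indices with torch.utils.data.Subset so that the loaders draw (data, target) pairs from the underlying ImageFolder instead of iterating over the indices themselves. I have not verified that this is the intended approach:

from torch.utils.data import DataLoader, Subset

# assuming dataset, kf, and batch_size are defined as above
for i, (train_index, test_index) in enumerate(kf.split(dataset)):
    # Subset wraps the full dataset with a list of indices, so the
    # DataLoader samples actual images/labels rather than raw indices.
    train_subset = Subset(dataset, train_index)
    test_subset = Subset(dataset, test_index)
    trainloader = DataLoader(train_subset, batch_size=batch_size, shuffle=True, num_workers=0, pin_memory=False)
    testloader = DataLoader(test_subset, batch_size=batch_size, shuffle=True, num_workers=0, pin_memory=False)
    print('Fold : {}, train : {}, test : {}'.format(i + 1, len(trainloader.dataset), len(testloader.dataset)))

    for batch_idx, (data, target) in enumerate(trainloader):
        print('Train Batch idx : {}, data shape : {}, target shape : {}'.format(batch_idx, data.shape, target.shape))

Would this be the right way to handle train_index and test_index?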