I have a dataloader for my training dataset.
"
Pytorch dataloader
a_loader = torch.utils.data.DataLoader(trainingDataset, batch_size=1 ,
shuffle=False,num_workers=0)
print('a_loader has {} samples'.format(len(a_loader)))
a_fake_sample = utils.Sample_from_Pool()
b_fake_sample = utils.Sample_from_Pool()
for epoch in range(self.start_epoch, args.epochs):
lr = self.g_optimizer.param_groups[0]['lr']
print('learning rate = %.7f' % lr)
for i, (batchct, batchpet) in enumerate(a_loader, 0):
# step
step = epoch * min(len(a_loader)) + i + 1
But for the step it gives me an error βintβ object is not iterable.
How can I solve this?