I have made a dataset using pytoch dataloader and Imagefolder, my dataset class has two Imagefolder dataset. These two datasets are paired(original and ground truth image). I want to feed these to pytorch neural network. Dataset class:
class bsds_dataset(Dataset):
def __init__(self, ds_main, ds_energy):
self.dataset1 = ds_main
self.dataset2 = ds_energy
def __getitem__(self, index):
x1 = self.dataset1[index]
x2 = self.dataset2[index]
return x1, x2
def __len__(self):
return len(self.dataset1)
and my collate function:
def my_collate(batch):
data = [item[0] for item in batch]
target = [item[1] for item in batch]
target = torch.LongTensor(target)
return [data, target]
my I am trying to use images as original and target data. Like image segmentation task. I have asked this question an answer reffered me to this post for detailed example and I used that snippet but its not working for me because of size of scalars, I don’t sure what ot do please help me.
Loading batches:
original_imagefolder = './images/whole'
target_imagefolder = './results/whole'
original_ds = ImageFolder(original_imagefolder, transform=transforms.ToTensor())
energy_ds = ImageFolder(target_imagefolder, transform=transforms.ToTensor())
dataset = bsds_dataset(original_ds, energy_ds)
loader = DataLoader(dataset, batch_size=16, collate_fn=my_collate)
for epoch in range(epochs):
for i, x, y in enumerate(loader):
print(x)
and the full traceback (including warning):
C:\Anaconda3\envs\torchgpu\lib\site-packages\ipykernel_launcher.py:77: UserWarning: nn.init.xavier_normal is now deprecated in favor of nn.init.xavier_normal_.
C:\Anaconda3\envs\torchgpu\lib\site-packages\ipykernel_launcher.py:78: UserWarning: nn.init.constant is now deprecated in favor of nn.init.constant_.
---------------------------------------------------------------------------
ValueError Traceback (most recent call last)
<ipython-input-42-4646d595e649> in <module>
5 optimizer = optim.SGD(model.parameters(), lr=0.001)
6 for epoch in range(epochs):
----> 7 for i, x, y in enumerate(loader):
8 print(x)
C:\Anaconda3\envs\torchgpu\lib\site-packages\torch\utils\data\dataloader.py in __next__(self)
558 if self.num_workers == 0: # same-process loading
559 indices = next(self.sample_iter) # may raise StopIteration
--> 560 batch = self.collate_fn([self.dataset[i] for i in indices])
561 if self.pin_memory:
562 batch = _utils.pin_memory.pin_memory_batch(batch)
<ipython-input-38-0a73fb00a6d1> in my_collate(batch)
2 data = [item[0] for item in batch]
3 target = [item[1] for item in batch]
----> 4 target = torch.LongTensor(target)
5 return [data, target]
ValueError: only one element tensors can be converted to Python scalars