Dataloader code:
from PIL import Image
from torch.utils.data import Dataset
from torchvision import transforms

class HRFDataset(Dataset):
    def __init__(self, image_paths, target_paths, train=True):  # note: train flag currently unused
        self.image_paths = image_paths
        self.target_paths = target_paths

    def transform(self, image, mask):
        # Resize both inputs and convert to tensors; normalize the image only
        imageTransform = transforms.Compose([
            transforms.Resize(size=(256, 256)),
            transforms.ToTensor(),
            transforms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225])  # ImageNet stats
        ])
        maskTransform = transforms.Compose([
            transforms.Resize(size=(256, 256)),
            transforms.ToTensor()
        ])
        image = imageTransform(image)
        mask = maskTransform(mask)
        return image, mask

    def __getitem__(self, index):
        image = Image.open(self.image_paths[index])
        mask = Image.open(self.target_paths[index])
        x, y = self.transform(image, mask)
        return x, y
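For reference, the image transform can be exercised on its own, outside the Dataset; a minimal standalone sketch (the file name below is an illustrative placeholder matching my naming scheme):

    from PIL import Image
    from torchvision import transforms

    imageTransform = transforms.Compose([
        transforms.Resize(size=(256, 256)),
        transforms.ToTensor(),
        transforms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225])
    ])

    # Illustrative path; any RGB image from the dataset would do
    img = Image.open("./dataset/images/01_dr.JPG").convert("RGB")
    print(imageTransform(img).size())  # expected: torch.Size([3, 256, 256])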
Initializing the dataset:
# Creating training image and mask paths for the dataset
extra_val = "0"
train_dir = "./dataset/images/"
mask_dir = "./dataset/mask/"
train_array = []
val_array = []
for i in range(1, 11):
    # File names are zero-padded to two digits: 01_dr.JPG ... 10_h.jpg
    prefix = extra_val + str(i) if i < 10 else str(i)
    train_array.append(train_dir + prefix + "_dr.JPG")
    train_array.append(train_dir + prefix + "_g.jpg")
    train_array.append(train_dir + prefix + "_h.jpg")
    val_array.append(mask_dir + prefix + "_dr_mask.tif")
    val_array.append(mask_dir + prefix + "_g_mask.tif")
    val_array.append(mask_dir + prefix + "_h_mask.tif")
# Creating validation image and mask paths (currently the same files)
valid_image = []
valid_mask = []
for i in range(1, 11):
    prefix = extra_val + str(i) if i < 10 else str(i)
    valid_image.append(train_dir + prefix + "_dr.JPG")
    valid_image.append(train_dir + prefix + "_g.jpg")
    valid_image.append(train_dir + prefix + "_h.jpg")
    valid_mask.append(mask_dir + prefix + "_dr_mask.tif")
    valid_mask.append(mask_dir + prefix + "_g_mask.tif")
    valid_mask.append(mask_dir + prefix + "_h_mask.tif")
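To rule out bad paths, a minimal existence check over all four lists (a quick sketch, not part of the training code):

    import os

    # Verify every constructed path points at a real file
    for p in train_array + val_array + valid_image + valid_mask:
        assert os.path.exists(p), "missing file: " + p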
train_set = HRFDataset(train_array, val_array)
val_set = HRFDataset(valid_image, valid_mask)
print(train_set)
Output of the print statement:
Out[45]: <__main__.HRFDataset object at 0x7fe7d659d550>
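For completeness, indexing the dataset directly calls __getitem__ without going through any sampler, which should isolate the transform from the DataLoader:

    # Fetch one sample straight from the dataset (bypasses the DataLoader)
    x, y = train_set[0]
    print(x.size(), y.size())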
Implementing the DataLoader:
from torch.utils.data import Dataset, DataLoader

image_datasets = {
    'train': train_set, 'val': val_set
}

batch_size = 10
dataloaders = {
    'train': DataLoader(train_set, batch_size=batch_size, shuffle=True, num_workers=0),
    'val': DataLoader(val_set, batch_size=batch_size, shuffle=True, num_workers=0)
}

dataset_sizes = {
    x: len(image_datasets[x]) for x in image_datasets.keys()
}
dataset_sizes
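For context, downstream the plan is to pull batches like this (never reached, since construction already fails; the expected shapes assume RGB images and single-channel masks):

    # Intended consumption of the loader: grab one batch and inspect shapes
    images, masks = next(iter(dataloaders['train']))
    print(images.size())  # expected: (10, 3, 256, 256)
    print(masks.size())   # expected: (10, 1, 256, 256) for single-channel masks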
But at this stage I am getting the following error:
NotImplementedError                       Traceback (most recent call last)
<ipython-input-...> in <module>()
      8
      9 dataloaders = {
---> 10     'train': DataLoader(train_set, batch_size=batch_size, shuffle=True, num_workers=0),
     11     'val': DataLoader(val_set, batch_size=batch_size, shuffle=True, num_workers=0)
     12 }

/home/gul/anaconda2/lib/python2.7/site-packages/torch/utils/data/dataloader.pyc in __init__(self, dataset, batch_size, shuffle, sampler, batch_sampler, num_workers, collate_fn, pin_memory, drop_last, timeout, worker_init_fn)
    800         if sampler is None:
    801             if shuffle:
--> 802                 sampler = RandomSampler(dataset)
    803             else:
    804                 sampler = SequentialSampler(dataset)

/home/gul/anaconda2/lib/python2.7/site-packages/torch/utils/data/sampler.pyc in __init__(self, data_source, replacement, num_samples)
     58
     59         if self.num_samples is None:
---> 60             self.num_samples = len(self.data_source)
     61
     62         if not isinstance(self.num_samples, int) or self.num_samples <= 0:

/home/gul/anaconda2/lib/python2.7/site-packages/torch/utils/data/dataset.pyc in __len__(self)
     18
     19     def __len__(self):
---> 20         raise NotImplementedError
     21
     22     def __add__(self, other):

NotImplementedError:
I can't seem to find the problem. Any help would be appreciated.