I have images in a folder. So, I made a custom dataset
to load the images.
class CustomDataSet(Dataset):
def __init__(self, main_dir, transform=None):
self.main_dir = main_dir
self.transform = transform
self.all_imgs = os.listdir(main_dir)
def __len__(self):
return len(self.all_imgs)
def __getitem__(self, idx):
img_loc = os.path.join(self.main_dir, self.all_imgs[idx])
image = Image.open(img_loc).convert("RGB")
tensor_image = self.transform(image)
return tensor_image
Then, I am trying to make TensorDataset
from image_tensor and labels_tensor
. Finally, I want to make a DataLoader
using this TensorDataset
def load_train_data():
# Some code to the directory path etc.
# doing some preprocessing
transform = transforms.Compose([
transforms.Resize((256, 256)),
transforms.ToTensor(),
transforms.Normalize((0.5, 0.5, 0.5), (0.5, 0.5, 0.5))])
#loading images using CustomDataSet
train_data_tensor = CustomDataSet(train_data_dir, transform=transform)
# Making TnesorDataset using both train_data_tensor and train_label_price_tensor
train_tensor = TensorDataset(train_data_tensor, train_label_price_tensor)
# Making Train Dataloader
train_loader = DataLoader(train_tensor, batch_size= 1, num_workers= 2, shuffle= True)
But, I am getting an error
Traceback (most recent call last):
File "preprocess.py", line 86, in <module>
load_train_data()
File "preprocess.py", line 75, in load_train_data
train_tensor = TensorDataset(train_data_tensor, train_label_price_tensor)
File "/home/akib/.local/lib/python3.8/site-packages/torch/utils/data/dataset.py", line 158, in __init__
assert all(tensors[0].size(0) == tensor.size(0) for tensor in tensors)
File "/home/akib/.local/lib/python3.8/site-packages/torch/utils/data/dataset.py", line 158, in <genexpr>
assert all(tensors[0].size(0) == tensor.size(0) for tensor in tensors)
AttributeError: 'CustomDataSet' object has no attribute 'size'
What wrong with my code?