Resizing TIF images

I know that this topic has been brought up several times on this platform already, but bear with me.
I am trying to resize these TIF images from (384, 384) to (256, 256) using the following code:

class RGBCloudDataset(Dataset):
    def __init__(self, red_dir, blue_dir, green_dir, gt_dir):
        

        # List the files in the red folder
        # and combine them, in a dictionary,
        # with the matching files from the other bands
        
        self.files = [self.combine_files(f, green_dir, blue_dir, gt_dir)
                      for f in red_dir.iterdir() if not f.is_dir()]
        
                
        random.seed(seed)
        self.files = random.sample(self.files, k=2000)
        
    def combine_files(self, red_file: Path, green_dir, blue_dir, gt_dir):
        
        files = {'red': red_file, 
                 'green': green_dir/red_file.name.replace('red', 'green'),
                 'blue': blue_dir/red_file.name.replace('red', 'blue'), 
                 'gt': gt_dir/red_file.name.replace('red', 'gt')}

        return files
    
    
    
    def OpenAsArray(self, idx, invert=False):
        
        red_channel   = Image.open(self.files[idx]['red'])
        green_channel = Image.open(self.files[idx]['green'])
        blue_channel  = Image.open(self.files[idx]['blue'])
        
        red_channel   = resize(red_channel, (256, 256), mode = "constant", 
                               preserve_range = True, anti_aliasing = False)
        green_channel = resize(green_channel, (256, 256), mode = "constant", 
                               preserve_range = True, anti_aliasing = False)
        blue_channel  = resize(blue_channel, (256, 256), mode = "constant", 
                               preserve_range = True, anti_aliasing = False)
        
        
        raw_rgb=np.stack([np.array(red_channel),
                          np.array(green_channel),
                          np.array(blue_channel)], axis = 2)
     

        if invert:
            raw_rgb = raw_rgb.transpose((2, 0, 1))
    
    
        return (raw_rgb / np.iinfo(raw_rgb.dtype).max)
    
    
    
    
    def OpenMask(self, idx, add_dims=False):
        
        raw_mask = np.array(Image.open(self.files[idx]['gt']))
        raw_mask = np.where(raw_mask == 255, 1, 0)
        
        
        return np.expand_dims(raw_mask, 0) if add_dims else raw_mask



        
    def __len__(self):
        
        return len(self.files)
    
    
    
    def __getitem__(self, idx):
        
        x = torch.tensor(self.OpenAsArray(idx, invert=True), dtype=torch.float32)
        y = torch.tensor(self.OpenMask(idx, add_dims=False), dtype=torch.int64)
        
        return x, y
    
    
    
    def open_as_pil(self, idx):
        
        arr = 256 * self.OpenAsArray(idx)
        
        return Image.fromarray(arr.astype(np.uint8), 'RGB')  
    
    
    
    def __repr__(self):
        
        s = 'Dataset class with {} files'.format(self.__len__())

        return s

But this throws the following error:

AttributeError                            Traceback (most recent call last)
<ipython-input-24-e51ffe5ad5d8> in <module>
     23 RGBtest_loader  = DataLoader(RGBtest_dataset , batch_size=4, shuffle=True, num_workers=2)
     24 
---> 25 rgb_img, mask = next(iter(RGBtrain_loader))
     26 
     27 print('\n')

/opt/conda/lib/python3.7/site-packages/torch/utils/data/dataloader.py in __next__(self)
    433         if self._sampler_iter is None:
    434             self._reset()
--> 435         data = self._next_data()
    436         self._num_yielded += 1
    437         if self._dataset_kind == _DatasetKind.Iterable and \

/opt/conda/lib/python3.7/site-packages/torch/utils/data/dataloader.py in _next_data(self)
   1083             else:
   1084                 del self._task_info[idx]
-> 1085                 return self._process_data(data)
   1086 
   1087     def _try_put_index(self):

/opt/conda/lib/python3.7/site-packages/torch/utils/data/dataloader.py in _process_data(self, data)
   1109         self._try_put_index()
   1110         if isinstance(data, ExceptionWrapper):
-> 1111             data.reraise()
   1112         return data
   1113 

/opt/conda/lib/python3.7/site-packages/torch/_utils.py in reraise(self)
    426             # have message field
    427             raise self.exc_type(message=msg)
--> 428         raise self.exc_type(msg)
    429 
    430 

AttributeError: Caught AttributeError in DataLoader worker process 0.
Original Traceback (most recent call last):
  File "/opt/conda/lib/python3.7/site-packages/torch/utils/data/_utils/worker.py", line 198, in _worker_loop
    data = fetcher.fetch(index)
  File "/opt/conda/lib/python3.7/site-packages/torch/utils/data/_utils/fetch.py", line 44, in fetch
    data = [self.dataset[idx] for idx in possibly_batched_index]
  File "/opt/conda/lib/python3.7/site-packages/torch/utils/data/_utils/fetch.py", line 44, in <listcomp>
    data = [self.dataset[idx] for idx in possibly_batched_index]
  File "/opt/conda/lib/python3.7/site-packages/torch/utils/data/dataset.py", line 272, in __getitem__
    return self.dataset[self.indices[idx]]
  File "<ipython-input-22-7ca78407731d>", line 74, in __getitem__
    x = torch.tensor(self.OpenAsArray(idx, invert=True), dtype=torch.float32)
  File "<ipython-input-22-7ca78407731d>", line 34, in OpenAsArray
    preserve_range = True, anti_aliasing = False)
  File "/opt/conda/lib/python3.7/site-packages/skimage/transform/_warps.py", line 93, in resize
    input_shape = image.shape
AttributeError: 'TiffImageFile' object has no attribute 'shape'

I have also tried torchvision.transforms, but for some reason transforms.Resize(256) throws an error as well. Any help is very much appreciated!

Based on the error message, self.OpenAsArray(idx, invert=True) seems to be operating on a TiffImageFile, which cannot be transformed into a PyTorch tensor, and thus this line of code raises the error:

x = torch.tensor(self.OpenAsArray(idx, invert=True), dtype=torch.float32)

You would thus have to make sure to convert this TiffImageFile to e.g. a numpy array first and then transform it to a tensor via x = torch.from_numpy(np_arr).
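
For reference, here is a minimal sketch of what that conversion could look like (the standalone helper name open_as_array is only for illustration, and it assumes the TIF bands hold integer data so that np.iinfo applies; the key point is just calling np.array on the PIL image before handing it to skimage's resize):

import numpy as np
from PIL import Image
from skimage.transform import resize

def open_as_array(files, idx, invert=False):
    # Convert each band to an ndarray first; skimage's resize expects
    # an array and fails on a raw TiffImageFile.
    red   = np.array(Image.open(files[idx]['red']))
    green = np.array(Image.open(files[idx]['green']))
    blue  = np.array(Image.open(files[idx]['blue']))

    # preserve_range=True keeps the original value range, but resize
    # still returns float64, so remember the original integer dtype.
    bands = [resize(band, (256, 256), mode="constant",
                    preserve_range=True, anti_aliasing=False)
             for band in (red, green, blue)]

    raw_rgb = np.stack(bands, axis=2)
    if invert:
        raw_rgb = raw_rgb.transpose((2, 0, 1))

    # Normalize by the max of the original integer dtype, not of the
    # float array returned by resize.
    return raw_rgb / np.iinfo(red.dtype).max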

Thanks @ptrblck for your swift response.
I am not sure if I am following, but I tried this:

    def __getitem__(self, idx):
        x = self.OpenAsArray(idx, invert=True)
        y = self.OpenMask(idx, add_dims=False)

        if self.transform is not None:
            x, y = self.transform((torch.from_numpy(x), torch.from_numpy(y)))

        return x, y

train_transforms = transforms.Compose([transforms.Resize(256),
                                        transforms.ToTensor()])
RGBdata = RGBCloudDataset(red_dir, blue_dir, green_dir, gt_dir, transform=train_transforms)

But it still raises an error:

---------------------------------------------------------------------------
TypeError                                 Traceback (most recent call last)
<ipython-input-14-e310956eca45> in <module>
      3 RGBtest_loader  = DataLoader(RGBtest_dataset , batch_size=2, shuffle=True, num_workers=2)
      4 
----> 5 rgb_img, mask = next(iter(RGBtrain_loader))
      6 
      7 print('\n')

/opt/conda/lib/python3.7/site-packages/torch/utils/data/dataloader.py in __next__(self)
    433         if self._sampler_iter is None:
    434             self._reset()
--> 435         data = self._next_data()
    436         self._num_yielded += 1
    437         if self._dataset_kind == _DatasetKind.Iterable and \

/opt/conda/lib/python3.7/site-packages/torch/utils/data/dataloader.py in _next_data(self)
   1083             else:
   1084                 del self._task_info[idx]
-> 1085                 return self._process_data(data)
   1086 
   1087     def _try_put_index(self):

/opt/conda/lib/python3.7/site-packages/torch/utils/data/dataloader.py in _process_data(self, data)
   1109         self._try_put_index()
   1110         if isinstance(data, ExceptionWrapper):
-> 1111             data.reraise()
   1112         return data
   1113 

/opt/conda/lib/python3.7/site-packages/torch/_utils.py in reraise(self)
    426             # have message field
    427             raise self.exc_type(message=msg)
--> 428         raise self.exc_type(msg)
    429 
    430 

TypeError: Caught TypeError in DataLoader worker process 0.
Original Traceback (most recent call last):
  File "/opt/conda/lib/python3.7/site-packages/torch/utils/data/_utils/worker.py", line 198, in _worker_loop
    data = fetcher.fetch(index)
  File "/opt/conda/lib/python3.7/site-packages/torch/utils/data/_utils/fetch.py", line 44, in fetch
    data = [self.dataset[idx] for idx in possibly_batched_index]
  File "/opt/conda/lib/python3.7/site-packages/torch/utils/data/_utils/fetch.py", line 44, in <listcomp>
    data = [self.dataset[idx] for idx in possibly_batched_index]
  File "/opt/conda/lib/python3.7/site-packages/torch/utils/data/dataset.py", line 272, in __getitem__
    return self.dataset[self.indices[idx]]
  File "<ipython-input-11-842d53b2f062>", line 65, in __getitem__
    x, y = self.transform((torch.from_numpy(x), torch.from_numpy(y)))
  File "/opt/conda/lib/python3.7/site-packages/torchvision/transforms/transforms.py", line 67, in __call__
    img = t(img)
  File "/opt/conda/lib/python3.7/site-packages/torch/nn/modules/module.py", line 727, in _call_impl
    result = self.forward(*input, **kwargs)
  File "/opt/conda/lib/python3.7/site-packages/torchvision/transforms/transforms.py", line 267, in forward
    return F.resize(img, self.size, self.interpolation)
  File "/opt/conda/lib/python3.7/site-packages/torchvision/transforms/functional.py", line 310, in resize
    return F_pil.resize(img, size=size, interpolation=interpolation)
  File "/opt/conda/lib/python3.7/site-packages/torchvision/transforms/functional_pil.py", line 410, in resize
    raise TypeError('img should be PIL Image. Got {}'.format(type(img)))
TypeError: img should be PIL Image. Got <class 'tuple'>

You are now passing a tuple to the transformation, which isn’t supported, so you would need to apply the transformation on both tensors separately.
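
For illustration, a minimal sketch of what that could look like in __getitem__, assuming a torchvision version whose Resize accepts tensors (0.8 or newer), that self.transform is stored by the modified __init__, and that ToTensor is dropped from the pipeline because the inputs are already tensors:

    def __getitem__(self, idx):
        x = torch.from_numpy(self.OpenAsArray(idx, invert=True)).float()
        # Keep a channel dimension so Resize sees a [1, H, W] tensor.
        y = torch.from_numpy(self.OpenMask(idx, add_dims=True)).float()

        if self.transform is not None:
            # Apply the transform to each tensor separately instead of
            # passing a tuple.
            x = self.transform(x)
            y = self.transform(y)

        # Bilinear resizing interpolates the 0/1 mask values, so re-binarize
        # and drop the channel dimension again.
        return x, (y.squeeze(0) > 0.5).long()

train_transforms = transforms.Compose([transforms.Resize(256)])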

Alright. I changed it to this:

        if self.transform is not None:
            x = self.transform(torch.from_numpy(x))
            y = self.transform(torch.from_numpy(y))

        return x, y

and now it says this:

---------------------------------------------------------------------------
TypeError                                 Traceback (most recent call last)
<ipython-input-18-e310956eca45> in <module>
      3 RGBtest_loader  = DataLoader(RGBtest_dataset , batch_size=2, shuffle=True, num_workers=2)
      4 
----> 5 rgb_img, mask = next(iter(RGBtrain_loader))
      6 
      7 print('\n')

/opt/conda/lib/python3.7/site-packages/torch/utils/data/dataloader.py in __next__(self)
    433         if self._sampler_iter is None:
    434             self._reset()
--> 435         data = self._next_data()
    436         self._num_yielded += 1
    437         if self._dataset_kind == _DatasetKind.Iterable and \

/opt/conda/lib/python3.7/site-packages/torch/utils/data/dataloader.py in _next_data(self)
   1083             else:
   1084                 del self._task_info[idx]
-> 1085                 return self._process_data(data)
   1086 
   1087     def _try_put_index(self):

/opt/conda/lib/python3.7/site-packages/torch/utils/data/dataloader.py in _process_data(self, data)
   1109         self._try_put_index()
   1110         if isinstance(data, ExceptionWrapper):
-> 1111             data.reraise()
   1112         return data
   1113 

/opt/conda/lib/python3.7/site-packages/torch/_utils.py in reraise(self)
    426             # have message field
    427             raise self.exc_type(message=msg)
--> 428         raise self.exc_type(msg)
    429 
    430 

TypeError: Caught TypeError in DataLoader worker process 0.
Original Traceback (most recent call last):
  File "/opt/conda/lib/python3.7/site-packages/torch/utils/data/_utils/worker.py", line 198, in _worker_loop
    data = fetcher.fetch(index)
  File "/opt/conda/lib/python3.7/site-packages/torch/utils/data/_utils/fetch.py", line 44, in fetch
    data = [self.dataset[idx] for idx in possibly_batched_index]
  File "/opt/conda/lib/python3.7/site-packages/torch/utils/data/_utils/fetch.py", line 44, in <listcomp>
    data = [self.dataset[idx] for idx in possibly_batched_index]
  File "/opt/conda/lib/python3.7/site-packages/torch/utils/data/dataset.py", line 272, in __getitem__
    return self.dataset[self.indices[idx]]
  File "<ipython-input-15-aa8ec7b692da>", line 65, in __getitem__
    x = self.transform(torch.from_numpy(x))
  File "/opt/conda/lib/python3.7/site-packages/torchvision/transforms/transforms.py", line 67, in __call__
    img = t(img)
  File "/opt/conda/lib/python3.7/site-packages/torchvision/transforms/transforms.py", line 104, in __call__
    return F.to_tensor(pic)
  File "/opt/conda/lib/python3.7/site-packages/torchvision/transforms/functional.py", line 64, in to_tensor
    raise TypeError('pic should be PIL Image or ndarray. Got {}'.format(type(pic)))
TypeError: pic should be PIL Image or ndarray. Got <class 'torch.Tensor'>