Image dataloader error

# Root folders for the two image streams. Both point at the same folder
# here — presumably a placeholder; point B at the second modality when
# it is available (TODO confirm).
traindir_A = 'F:/rgb_images/'
traindir_B = 'F:/rgb_images/'

# Sample counts — looks like intended train/test sizes; not used in the
# visible snippet (NOTE(review): verify against the full script).
train_data_t=300
test_data_t=44

class ConcatDataset(Dataset):
    """Pair two datasets: item ``i`` is ``(dataset_a[i], dataset_b[i])``.

    NOTE: despite the name, this *zips* the datasets rather than
    concatenating them end-to-end (torch's own ``ConcatDataset`` does
    the latter). Length is the shorter of the two so every index is
    valid in both.
    """

    def __init__(self, dataa, datab):
        # BUG FIX: the original did `self.datasets = dataa + datab`.
        # On torch Datasets, `+` builds an end-to-end ConcatDataset, so
        # `for d in self.datasets` iterated individual *samples*, not
        # the two datasets — breaking __getitem__ and __len__.
        # Store the datasets themselves as a tuple instead.
        self.datasets = (dataa, datab)

    def __getitem__(self, i):
        # One sample from each underlying dataset at the same index.
        return tuple(d[i] for d in self.datasets)

    def __len__(self):
        # Bounded by the shorter dataset so indexing stays in range.
        return min(len(d) for d in self.datasets)

# Paired loader over the two ImageFolder streams.
# NOTE(review): ImageFolder is created without a `transform`, so samples
# are PIL images — the default collate cannot batch those; a
# transforms.ToTensor() is presumably missing (TODO confirm).
# `batch_size` and `workers` must be defined elsewhere in the full file.
# The WinError 1455 / BrokenPipeError in the pasted traceback is the
# classic Windows multi-process loader failure: each worker process
# re-imports torch and can exhaust the paging file. Try num_workers=0
# (or enlarge the Windows paging file) to rule that out.
train_loader = torch.utils.data.DataLoader(ConcatDataset( datasets.ImageFolder(traindir_A), datasets.ImageFolder(traindir_B) ),
             batch_size=batch_size, shuffle=True,
             num_workers=workers, pin_memory=True)

def main():
    """Iterate the paired loader once, printing the batch shapes.

    Relies on the module-level ``train_loader``. Must be called under
    the ``__main__`` guard on Windows, where DataLoader workers are
    spawned as fresh processes that re-import this module.
    """
    for i, (input, target) in enumerate(train_loader):
        # FIX: original printed `targrt.shape` (typo) -> NameError on
        # the very first batch.
        print(input.shape, target.shape)

if __name__ == '__main__':
    # Required on Windows: DataLoader workers use the `spawn` start
    # method, which re-imports this module in every child process; the
    # guard stops the children from re-running the training loop.
    main()

############ output error

File "F:\image_load.py", line 5, in <module>
from torch.utils.data import Dataset, DataLoader, random_split, sampler
File "C:\Users\Anaconda3\envs\py36\lib\site-packages\torch\__init__.py", line 124, in <module>
raise err
OSError: [WinError 1455] The paging file is too small for this operation to complete. Error loading "C:\Users\Anaconda3\envs\py36\lib\site-packages\torch\lib\caffe2_detectron_ops_gpu.dll" or one of its dependencies.
return _default_context.get_context().Process._Popen(process_obj)
File "C:\Users\Anaconda3\envs\py36\lib\multiprocessing\context.py", line 322, in _Popen
return Popen(process_obj)
File "C:\Users\Anaconda3\envs\py36\lib\multiprocessing\popen_spawn_win32.py", line 65, in __init__
reduction.dump(process_obj, to_child)
File "C:\Users\Anaconda3\envs\py36\lib\multiprocessing\reduction.py", line 60, in dump
ForkingPickler(file, protocol).dump(obj)
BrokenPipeError: [Errno 32] Broken pipe

Is this a PyTorch version error?

This may be related to the known Windows issue where spawned DataLoader worker processes re-import torch and exhaust the paging file (WinError 1455); setting num_workers=0 is the usual first workaround.