Image dataloader error

# Root folders for the two image streams fed to ConcatDataset below.
# NOTE(review): both variables point at the SAME directory — presumably
# traindir_B should reference a second dataset; confirm before training.
traindir_A = 'F:/rgb_images/'
traindir_B = 'F:/rgb_images/'


class ConcatDataset(Dataset):
    """Pair two datasets so that item ``i`` yields ``(dataa[i], datab[i])``.

    Length is the shorter of the two datasets so indexing never overruns
    either one.
    """

    def __init__(self, dataa, datab):
        # Keep the dataset objects themselves. The original
        # ``dataa + datab`` CONCATENATED the two datasets, so
        # __getitem__ then iterated over individual *samples*
        # (indexing each sample with [i]) instead of the two datasets.
        self.datasets = (dataa, datab)

    def __getitem__(self, i):
        # One sample from each dataset at the same index.
        return tuple(d[i] for d in self.datasets)

    def __len__(self):
        # Bounded by the smaller dataset so every index is valid for both.
        return min(len(d) for d in self.datasets)

# Wrap the two ImageFolder datasets in ConcatDataset so each batch yields
# paired samples, then hand the combined dataset to a DataLoader.
# (The original line was syntactically broken: keyword arguments with no
# enclosing DataLoader call and an unmatched ``)``.)
# NOTE(review): the quoted WinError 1455 / BrokenPipeError below comes from
# DataLoader worker processes on Windows — setting num_workers=0 (or raising
# the paging-file size) avoids it; confirm which trade-off is wanted.
train_loader = DataLoader(
    ConcatDataset(datasets.ImageFolder(traindir_A),
                  datasets.ImageFolder(traindir_B)),
    batch_size=batch_size,
    shuffle=True,
    num_workers=workers,
    pin_memory=True,
)

def main():
    """Iterate the paired loader once.

    NOTE(review): ConcatDataset yields a 2-tuple per index — one
    (image, label) pair from each ImageFolder — so the ``(input, target)``
    unpacking here receives one pair per dataset, not (image, label);
    confirm against the intended training step.
    """
    # The original ``for`` had no body, which is a SyntaxError.
    for i, (input, target) in enumerate(train_loader):
        pass  # TODO: training / processing step goes here

if __name__ == '__main__':
    # The guard body was missing (SyntaxError). On Windows the guard is
    # mandatory: DataLoader worker processes are spawned (not forked) and
    # re-import this module, so unguarded top-level code re-executes in
    # every worker — one cause of the BrokenPipeError quoted below.
    main()
############ output error

File “F:\”, line 5, in
from torch.utils.data import Dataset, DataLoader, random_split, sampler
File “C:\Users\Anaconda3\envs\py36\lib\site-packages\”, line 124, in
raise err
OSError: [WinError 1455] The paging file is too small for this operation to complete. Error loading “C:\Users\Anaconda3\envs\py36\lib\site-packages\torch\lib\caffe2_detectron_ops_gpu.dll” or one of its dependencies.
return _default_context.get_context().Process._Popen(process_obj)
File “C:\Users\Anaconda3\envs\py36\lib\multiprocessing\”, line 322, in _Popen
return Popen(process_obj)
File “C:\Users\Anaconda3\envs\py36\lib\multiprocessing\”, line 65, in init
reduction.dump(process_obj, to_child)
File “C:\Users\Anaconda3\envs\py36\lib\multiprocessing\”, line 60, in dump
ForkingPickler(file, protocol).dump(obj)
BrokenPipeError: [Errno 32] Broken pipe

Is this a PyTorch version error?

Maybe it is related to this thread: