How to Save DataLoader State?

Maybe something like this:

import random
from torch.utils.data import Sampler, DataLoader

class MySampler(Sampler):
    def __init__(self, data_source):
        # keep the index order as a plain list so it can be shuffled
        # in place and saved/restored as part of a checkpoint
        self.seq = list(range(len(data_source)))
    def __iter__(self):
        return iter(self.seq)
    def __len__(self):
        return len(self.seq)

dataset = LoadYourDataset()   # placeholder for your own Dataset
sampler = MySampler(dataset)
# shuffle stays False: the sampler controls the order
dataloader = DataLoader(dataset, shuffle=False, sampler=sampler)

num_epochs = 999
for epoch in range(num_epochs):
    # reshuffle the index sequence at the start of every epoch
    random.shuffle(dataloader.sampler.seq)
    for i, (x, y) in enumerate(dataloader):
        # training step goes here
        # checkpoint i and dataloader.sampler.seq so the exact epoch
        # order can be restored and the first i batches skipped
        ...

I cannot promise that this code will work (just an idea), but the point is that i and sampler.seq together pin down exactly where you were in the epoch.
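
To make the "save i and seq" idea concrete, here is a minimal resume-side sketch. It reuses MySampler and LoadYourDataset from the snippet above and assumes you periodically dumped {"batch_idx": i, "seq": dataloader.sampler.seq} to a checkpoint file with torch.save; the key names and the checkpoint.pt path are just placeholders, not anything PyTorch provides.

import torch
from torch.utils.data import DataLoader

# Saving (inside the inner loop above), e.g. every N batches:
#     torch.save({"batch_idx": i, "seq": dataloader.sampler.seq}, "checkpoint.pt")

# Resuming: rebuild the loader, restore that epoch's order, skip what was done.
state = torch.load("checkpoint.pt")

dataset = LoadYourDataset()      # same placeholder Dataset as above
sampler = MySampler(dataset)
sampler.seq = state["seq"]       # restore the exact shuffled order of that epoch
dataloader = DataLoader(dataset, shuffle=False, sampler=sampler)

for i, (x, y) in enumerate(dataloader):
    if i <= state["batch_idx"]:  # skip batches finished before the interruption
        continue                 # (assumes the saved index is the last completed batch)
    # continue training from the first unseen batch
    ...

The main caveat is that this only reproduces the run if the restored seq is exactly the one in use when the checkpoint was written; skipping by index still iterates (and loads) the skipped batches, so it wastes some time at startup but keeps the data order identical.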