# Training-time augmentation pipeline: colour jitter, random flips / rotation,
# PIL-to-tensor conversion, then Cutout occlusion on the resulting tensor.
augmentations = [
    transforms.ColorJitter(0.1, 0.3, 0.3, 0.05),
    transforms.RandomHorizontalFlip(),
    my_folder.RandomRotate(),
    my_folder.RandomVerticalFlip(),
    transforms.ToTensor(),
    Cutout(n_holes=args.n_holes, length=args.cutout_length),
]
transform = transforms.Compose(augmentations)
…
def _seed_worker(worker_id):
    """Re-seed NumPy and `random` inside each DataLoader worker process.

    With num_workers > 0, workers are separate processes: re-seeding the
    main process does not affect them. PyTorch derives each worker's
    ``torch.initial_seed()`` from the loader's ``generator``, so seeding
    NumPy/random from it makes augmentations reproducible per worker.
    """
    worker_seed = torch.initial_seed() % 2**32
    np.random.seed(worker_seed)
    random.seed(worker_seed)


# Dedicated generator that controls the base seed handed to every worker.
# Call g.manual_seed(<seed>) again before any pass you want to replay
# exactly (e.g. before each of the two comparison loops below).
g = torch.Generator()
g.manual_seed(0)

train_loader = DataLoader(dataset=train_dataset, shuffle=False,
                          batch_size=args.batch_size, num_workers=24,
                          worker_init_fn=_seed_worker, generator=g)
…
def _set_all_seeds(seed):
    """Seed every RNG in play (torch CPU, torch CUDA, NumPy, random)."""
    torch.manual_seed(seed)
    torch.cuda.manual_seed(seed)
    torch.cuda.manual_seed_all(seed)
    np.random.seed(seed)
    random.seed(seed)


# randint requires integer bounds; 1e+5 is a float (deprecated since
# Python 3.10, TypeError from 3.12) — use 10**5 instead.
RANDOM_SEED = random.randint(0, 10**5)

# NOTE(review): re-seeding here only affects the MAIN process. With
# num_workers > 0 the augmentation RNGs live in worker processes, so the
# two passes will still differ unless the DataLoader is built with a
# worker_init_fn and a seeded torch.Generator.
_set_all_seeds(RANDOM_SEED)
for batch_idx, (data, _, _) in enumerate(train_loader):
    x1 = data
    print(x1[0])
    break

_set_all_seeds(RANDOM_SEED)
for batch_idx, (data, _, _) in enumerate(train_loader):
    x2 = data
    print(x2[0])
    break
I’m building some tricky networks, and I need to get exactly the same data, in the same order, twice. x1 and x2 are exactly the same if I set num_workers=0, but otherwise they aren’t. (Using a single worker is not an option for me.)
Is there a way to fix the random seed for all of the DataLoader workers?
Thanks.