I have a value set in the __init__ function of my Dataset, and I want it to increase with the number of epochs. However, it resets to zero every epoch. How should I fix this?
import torch.utils.data as data

class My_Dataset(data.Dataset):
    def __init__(self, data_list):
        self.data_list = data_list
        self.rand_num = 0

    def __getitem__(self, index):
        self.rand_num += 1
        return self.data_list[index]

    def __len__(self):
        return len(self.data_list)
vmirly1 (Vahid Mirjalili) #2
Are you talking about self.rand_num? I tried iterating through the dataset using a DataLoader, and the value of rand_num keeps increasing:
import torch
import torch.utils.data as data
from torch.utils.data import DataLoader

class My_Dataset(data.Dataset):
    def __init__(self, data_list):
        self.data_list = data_list
        self.rand_num = 0

    def __getitem__(self, index):
        self.rand_num += 1
        return torch.tensor([self.data_list[index]])

    def __len__(self):
        return len(self.data_list)

vals = [i for i in range(20)]
ds = My_Dataset(vals)
data_loader = DataLoader(ds, batch_size=10)

print(ds.rand_num)
next(iter(data_loader))
print(ds.rand_num)

for epoch in range(5):
    for batch in data_loader:
        print(epoch, ds.rand_num)
which prints the following epoch and rand_num values:
0
10
0 20
0 30
1 40
1 50
2 60
2 70
3 80
3 90
4 100
4 110
How about printing it in __getitem__?
def __getitem__(self, index):
    self.rand_num += 1
    print(self.rand_num)
vmirly1 (Vahid Mirjalili) #4
Even in __getitem__ it is still fine for me, as long as I use num_workers=0 in the DataLoader. But when I use num_workers=1, it gets reset to zero as you mentioned. Do you have to use num_workers > 0?
Yes, I use num_workers=1.
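For context on why this happens: with num_workers=1 the DataLoader pickles the dataset into a separate worker process, and (with the default persistent_workers=False) a fresh worker is created at the start of every epoch, so a counter incremented inside __getitem__ starts over from zero and the copy held by the main process never changes. One possible workaround is to update the state on the main-process dataset object before each epoch, so the newly spawned workers receive the updated value. This is only a minimal sketch, not code from this thread, and the set_epoch helper is a name made up here:

import torch
import torch.utils.data as data
from torch.utils.data import DataLoader

class My_Dataset(data.Dataset):
    def __init__(self, data_list):
        self.data_list = data_list
        self.rand_num = 0   # per-epoch state, set from the main process

    def set_epoch(self, epoch):
        # hypothetical helper: call this in the main process before each epoch;
        # the worker spawned for that epoch gets a copy with the new value
        self.rand_num = epoch

    def __getitem__(self, index):
        # rand_num now reflects the current epoch even with num_workers > 0
        return torch.tensor([self.data_list[index]]), self.rand_num

    def __len__(self):
        return len(self.data_list)

if __name__ == "__main__":  # guard needed when workers use the spawn start method
    ds = My_Dataset(list(range(20)))
    loader = DataLoader(ds, batch_size=10, num_workers=1)
    for epoch in range(5):
        ds.set_epoch(epoch)  # update the state before the worker is created
        for batch, rand_num in loader:
            print(epoch, rand_num[0].item())

Note that this relies on the workers being recreated each epoch (persistent_workers=False, the default). If the counter really has to be incremented inside __getitem__ and shared back across processes, a multiprocessing-safe value (for example multiprocessing.Value) would be needed instead, since plain attributes only change in the worker's own copy of the dataset.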