I am getting an error: RuntimeError: [enforce fail at …\c10\core\CPUAllocator.cpp:72] data. DefaultCPUAllocator: not enough memory: you tried to allocate 5760000 bytes. Buy new RAM!
I am trying to load a very large 60 GB file. Is there a way to split the file into parts, or process it without reading it all into memory?
# Memory-map the array: mmap_mode='r' keeps the (60 GB) file on disk and
# pages slices in on demand instead of reading it all into RAM. The memmap
# stays cheap only as long as no downstream code materializes the whole array.
data_load = np.load('np_data_2y.npy', mmap_mode='r')
class NetDataset(Dataset):
    """Dataset over a (possibly memory-mapped) numpy array.

    The original implementation called ``torch.from_numpy(...)`` followed by
    ``.float()`` on the ENTIRE array in ``__init__``. ``.float()`` allocates a
    full in-memory copy, which defeats the ``mmap_mode='r'`` memmap and raises
    ``DefaultCPUAllocator: not enough memory`` for large files. The fix is to
    keep the lazy memmap and convert one sample at a time in ``__getitem__``.
    """

    def __init__(self, data=None):
        """Wrap *data* (defaults to the module-level ``data_load`` memmap).

        Accepting an explicit array is backward-compatible (the no-argument
        form behaves as before) and makes the class reusable and testable.
        """
        # Keep the lazy array as-is; do NOT materialize a float tensor here.
        self.input_data = data_load if data is None else data
        self.len = len(self.input_data)

    def __getitem__(self, index):
        # np.asarray copies only this one sample out of the memmap, and
        # .float() then allocates just one row — never the whole file.
        sample = np.asarray(self.input_data[index])
        return torch.from_numpy(sample).float()

    def __len__(self):
        return self.len
# Build the dataset over the module-level memmap and wrap it in a DataLoader.
# shuffle=False preserves on-disk order, which keeps memmap reads sequential.
# NOTE(review): batch_work is defined elsewhere in the file — confirm its value.
dataset = NetDataset()
train_loader = DataLoader(dataset=dataset, batch_size=batch_work, shuffle=False)