# Preprocessing: resize every image to 375x375 and convert it to a
# CHW float tensor in [0, 1].
transform = transforms.Compose(
    [transforms.Resize((375, 375)), transforms.ToTensor()])

# TODO: set this to the root folder of your dataset (ImageFolder expects
# one subdirectory per class). The original line was a syntax error:
# `path = #comment` assigns nothing.
path = "path/to/image/folder"

train_loader = torch.utils.data.DataLoader(
    torchvision.datasets.ImageFolder(path, transform=transform),
    batch_size=100,
    num_workers=0,  # single-process loading; increase for parallel workers
    shuffle=True)
class DNN:
def __init__(self):
self.outputs = []
self.model = models.resnet50(pretrained=True)
self.model.avgpool.register_forward_hook(self.hook)
self.model.eval()
def hook(self,module, input, output):
self.outputs.append(output)
# Loop over
def step(self, inputs):
data, label = inputs # ignore label
_ = self.model(data)
return 0
def predict(self, dataloader):
for i, batch in enumerate(dataloader):
print(i)
_ = self.step(batch)
# Run feature extraction over the whole dataset; one detached feature
# tensor per batch accumulates in a.outputs via the avgpool hook.
a = DNN()
a.predict(train_loader)
I am trying to get features from the pre-final (avgpool) layer of ResNet-50 (the model used in the code above), but even after a single batch the swap memory on my CPU keeps increasing until my laptop runs out of memory.
I am not using a GPU. I am using a 16 GB MacBook Pro and have about 25 GB of free disk space.