Is the following the correct way to freeze all but the top 8 feature layers of VGG16 for transfer learning?
import torch
import torch.nn as nn
import torchvision

class VGG16Transfer(nn.Module):  # class name is illustrative; the original snippet omits it
    def __init__(self, num_classes):
        super().__init__()
        # Note: pretrained=True is deprecated in recent torchvision;
        # weights=torchvision.models.VGG16_Weights.DEFAULT is the current spelling.
        original_model = torchvision.models.vgg16(pretrained=True)
        # Split the convolutional stack: all but the last 8 child modules stay frozen
        frozen_features = list(original_model.features.children())[:-8]
        self.frozen_features = nn.Sequential(*frozen_features)
        # Keep only the top 8 layers trainable (the original slice took every layer)
        features = list(original_model.features.children())[-8:]
        self.features = nn.Sequential(*features)
        for param in self.features.parameters():
            param.requires_grad = True  # already the default, but explicit
        for param in self.frozen_features.parameters():
            param.requires_grad = False
        # VGG16's feature extractor yields 512 x 7 x 7 maps for 224 x 224 inputs
        number_of_features = 512 * 7 * 7
        self.classifier = nn.Sequential(
            nn.Linear(number_of_features, 512),
            nn.Sigmoid(),
            nn.Linear(512, num_classes),
        )
    def forward(self, x):
        x = self.frozen_features(x)  # the frozen stack must run too; the original skipped it
        x = self.features(x)
        x = x.view(x.size(0), -1)
        x = self.classifier(x)
        return x
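
For reference, a minimal usage sketch (the VGG16Transfer name and the 224 x 224 ImageNet-style input size are assumptions, not part of the original snippet): handing the optimizer only the parameters with requires_grad=True means no optimizer state is kept for the frozen layers.

# Hypothetical usage: verify the split and train only the unfrozen parameters
model = VGG16Transfer(num_classes=10)

trainable = sum(p.numel() for p in model.parameters() if p.requires_grad)
total = sum(p.numel() for p in model.parameters())
print(f"trainable parameters: {trainable} / {total}")

# Give the optimizer only the parameters that still require gradients
optimizer = torch.optim.SGD(
    (p for p in model.parameters() if p.requires_grad), lr=1e-3, momentum=0.9
)

dummy = torch.randn(4, 3, 224, 224)  # assumed VGG16 input size
out = model(dummy)                   # shape: (4, 10)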