Transfer Learning: Freezing some convolution layers

Is the code below the correct way to freeze all but the top 8 feature layers for transfer learning?

def __init__(self, num_classes):
    original_model = torchvision.models.vgg16(pretrained=True)
    frozen_features = list(original_model.features.children())[:-8]
    self.frozen_features = torch.nn.Sequential(*frozen_features)

    features = list(original_model.features.children())
    self.features = torch.nn.Sequential(*features)

    for param in self.features.parameters():
        param.requires_grad = True
    for param in self.frozen_features.parameters():
        param.requires_grad = False

    self.classifier = nn.Sequential(
        nn.Linear(number_of_features, 512),
        nn.Sigmoid(),
        nn.Linear(512, num_classes),
    )

def forward(self, x):
    x = self.features(x)
    x = x.view(x.size(0), -1)
    x = self.classifier(x)
    return x

It looks correct: since self.features and self.frozen_features wrap the same child modules, setting requires_grad = False on self.frozen_features last also freezes the early layers that forward uses. But it is a bit complicated to read.

I would do something like this instead:

import torch
import torch.nn as nn
import torchvision

def print_requires_grad(module):
    # Utility to check which children of a module have trainable parameters.
    for i, child in enumerate(module.children()):
        print(i)
        for param in child.parameters():
            print(param.requires_grad)

class model(nn.Module):
    def __init__(self, num_classes):   
        super().__init__()     
        original_model = torchvision.models.vgg16(pretrained=True)

        self.features = original_model.features
        # Freeze everything except the last 8 feature layers.
        for child in list(self.features.children())[:-8]:
            for param in child.parameters():
                param.requires_grad = False

        # Flattened size of the VGG16 feature maps (512 * 7 * 7 for 224x224 inputs).
        number_of_features = 512 * 7 * 7
        self.classifier = nn.Sequential(
            nn.Linear(number_of_features, 512),
            nn.Sigmoid(),
            nn.Linear(512, num_classes),
        )

    def forward(self, x):
        x = self.features(x)
        x = x.view(x.size(0), -1)
        x = self.classifier(x)
        return x
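
To sanity-check the result (a minimal sketch, assuming 224x224 inputs and an arbitrary num_classes of 10 for illustration), you can instantiate the model, inspect which feature layers are trainable with the helper above, and pass only the trainable parameters to the optimizer:

m = model(num_classes=10)

# Children frozen above print False for their parameters,
# the last 8 children print True (only conv layers have parameters).
print_requires_grad(m.features)

# Hand only the trainable parameters to the optimizer.
optimizer = torch.optim.SGD(
    filter(lambda p: p.requires_grad, m.parameters()),
    lr=1e-3,
    momentum=0.9,
)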