I have tried to freeze part of my model but it does not work. Gradient computation is still enabled for each layer. Is that some sort of bug or am I doing something wrong?
# Load a pretrained ResNet-18 and fine-tune only a new classifier head.
model = models.resnet18(pretrained=True)

# Freeze the backbone.
# BUG FIX: the attribute is `requires_grad` (with an "s").  The original code
# assigned `param.require_grad = False`, which merely creates a new, unused
# attribute on each tensor and leaves gradient tracking enabled — that is why
# every one of the 21,284,672 parameters still showed up as trainable.
for param in model.parameters():
    param.requires_grad = False

# Replace the last layer.  Parameters of freshly constructed modules have
# requires_grad=True by default, so the new head is trainable without any
# explicit unfreezing (the original loop over `model.fc.parameters()` was
# redundant anyway, since that `fc` is replaced immediately afterwards).
num_features = model.fc.in_features
# BUG FIX: assigning `nn.Dropout(0.5)` to `model.fc` after the Linear (as the
# original did) threw the classifier away entirely — the model would emit 512
# backbone features instead of 2 logits.  Keep both in a Sequential.
model.fc = nn.Sequential(
    nn.Dropout(0.5),
    nn.Linear(num_features, 2),
)

# Report total vs. trainable parameter counts; with a frozen backbone only
# the new head (512*2 + 2 = 1,026 parameters) should be trainable.
total_params = sum(p.numel() for p in model.parameters())
print(f'{total_params:,} total parameters.')
total_trainable_params = sum(
    p.numel() for p in model.parameters() if p.requires_grad)
print(f'{total_trainable_params:,} training parameters.')
21,284,672 total parameters.
21,284,672 training parameters.