How to freeze a specific layer in a custom network?

When I try to freeze a specific layer in a custom network, it reports an error. How can I solve this problem?

    import torch.nn as nn

    class Net(nn.Module):
        def __init__(self):
            super(Net, self).__init__()

            def conv_bn(inp, oup, stride):
                return nn.Sequential(
                    nn.Conv2d(inp, oup, 3, stride, 1, bias=False),
                    nn.BatchNorm2d(oup),
                    nn.ReLU(inplace=True)
                )

            def conv_dw(inp, oup, stride):
                return nn.Sequential(
                    # depthwise convolution
                    nn.Conv2d(inp, inp, 3, stride, 1, groups=inp, bias=False),
                    nn.BatchNorm2d(inp),
                    nn.ReLU(inplace=True),

                    # pointwise convolution
                    nn.Conv2d(inp, oup, 1, 1, 0, bias=False),
                    nn.BatchNorm2d(oup),
                    nn.ReLU(inplace=True),
                )

            self.model = nn.Sequential(
                conv_bn(3, 32, 2),
                conv_dw(32, 64, 1),
                conv_dw(64, 128, 2),
                conv_dw(128, 128, 1),
                conv_dw(128, 256, 2),
                conv_dw(256, 256, 1),
                conv_dw(256, 512, 2),
                conv_dw(512, 512, 1),
                conv_dw(512, 512, 1),
                conv_dw(512, 512, 1),
                conv_dw(512, 512, 1),
                conv_dw(512, 512, 1),
                conv_dw(512, 1024, 2),
                conv_dw(1024, 1024, 1),
                nn.AvgPool2d(7),
            )
            self.fc = nn.Linear(1024, 1000)

        def forward(self, x):
            x = self.model(x)
            x = x.view(-1, 1024)
            x = self.fc(x)
            return x

The error is raised by:

    mobilenet.weight.requires_grad = False

since mobilenet (i.e. your Net instance) doesn't have a .weight attribute itself; only its submodules (the conv, batchnorm, and linear layers) do.
If you want to freeze parameters, you need to access the desired layer, e.g. via mobilenet.model[0][0].weight.
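For example, here is a minimal sketch, assuming mobilenet = Net() as defined above (the indices are just illustrative; they reach the first conv_bn block and its conv layer):

    import torch.optim as optim

    mobilenet = Net()

    # Freeze a single tensor: the weight of the first conv layer
    mobilenet.model[0][0].weight.requires_grad = False

    # Or freeze every parameter of a whole block, e.g. the first conv_bn block
    for param in mobilenet.model[0].parameters():
        param.requires_grad = False

    # Pass only the still-trainable parameters to the optimizer
    optimizer = optim.SGD(
        filter(lambda p: p.requires_grad, mobilenet.parameters()),
        lr=0.01,
    )

Also note that freezing a batchnorm layer's parameters does not stop it from updating its running statistics in train mode; call .eval() on that layer as well if you want it completely fixed.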
