Adding a custom net to a pretrained model

I am using resnet50 as a pretrained model.
In resnet50 we have one fc layer and a layer4 block; I want to remove both completely and feed the output of the previous layer (layer3) into my new net:

import torch.nn as nn
import torch.nn.functional as nnFunctions

class convNet(nn.Module):
    #constructor
    def __init__(self):
        super(convNet, self).__init__()
        #defining layers in convnet
        # layer3 of resnet50 outputs 1024 channels (2048 would be layer4's output)
        self.conv1 = nn.Conv2d(1024, 1024, kernel_size=3, stride=1, padding=1)
        self.conv2 = nn.Conv2d(1024, 512, kernel_size=3, stride=1, padding=1)
        self.conv3 = nn.Conv2d(512, 256, kernel_size=3, stride=1, padding=1)

        # parallel convs with different receptive fields, summed in forward
        self.pconv1 = nn.Conv2d(256, 256, kernel_size=(3,3), stride=1, padding=(1,1))
        self.pconv2 = nn.Conv2d(256, 256, kernel_size=(3,7), stride=1, padding=(1,3))
        self.pconv3 = nn.Conv2d(256, 256, kernel_size=(7,3), stride=1, padding=(3,1))

        self.conv4 = nn.Conv2d(256, 64, kernel_size=3, stride=1, padding=1)
        self.conv5 = nn.Conv2d(64, 1, kernel_size=3, stride=1, padding=1)

    def forward(self, x):
        x = nnFunctions.relu(self.conv1(x))
        x = nnFunctions.relu(self.conv2(x))
        x = nnFunctions.relu(self.conv3(x))
        # parallel conv branches share the same input and are summed
        x = nnFunctions.relu(self.pconv1(x) + self.pconv2(x) + self.pconv3(x))

        x = nnFunctions.relu(self.conv4(x))
        x = nnFunctions.relu(self.conv5(x))
        return x
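
A quick shape check for this head (a sketch; the 14x14 spatial size assumes a 224x224 image going into the ResNet):

import torch

net = convNet()
dummy = torch.randn(1, 1024, 14, 14)   # shaped like layer3's output for a 224x224 input
print(net(dummy).shape)                # torch.Size([1, 1, 14, 14])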

How can I remove the fc and layer4?
How can I add the above network to the pretrained resnet50? I also want to fine-tune, so I want to set requires_grad=True for layer3 (the last remaining layer after removing fc and layer4). How can I do that?


Write a new forward function that starts from the resnet50 forward function, but modifies it in the way you want.
All your questions can be done this way.
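
For instance, a minimal sketch of such a forward, copying the steps from torchvision's resnet50 forward but stopping after layer3 (the class name and pretrained=True are illustrative, not from the original posts):

import torch.nn as nn
import torchvision.models as models

class ResNetTrunk(nn.Module):
    """resnet50 with layer4, avgpool and fc left out of forward."""
    def __init__(self):
        super(ResNetTrunk, self).__init__()
        self.resnet = models.resnet50(pretrained=True)

    def forward(self, x):
        # same steps as torchvision's ResNet.forward, minus layer4/avgpool/fc
        x = self.resnet.conv1(x)
        x = self.resnet.bn1(x)
        x = self.resnet.relu(x)
        x = self.resnet.maxpool(x)
        x = self.resnet.layer1(x)
        x = self.resnet.layer2(x)
        x = self.resnet.layer3(x)
        return x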


So you are saying something like this:

import torch.nn as nn

class convNet(nn.Module):
    #constructor
    def __init__(self, resnet, mynet):
        super(convNet, self).__init__()
        #defining layers in convnet
        self.resnet = resnet
        self.myNet = mynet   # must be assigned to self, or the module is not registered

    def forward(self, x):
        # the resnet stem has to run before layer1
        x = self.resnet.conv1(x)
        x = self.resnet.bn1(x)
        x = self.resnet.relu(x)
        x = self.resnet.maxpool(x)
        x = self.resnet.layer1(x)
        x = self.resnet.layer2(x)
        x = self.resnet.layer3(x)
        x = self.myNet(x)
        return x
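
Something like the following would tie it together (a sketch; customHead stands for the conv net from the first post, renamed here to avoid the clash with this wrapper's convNet name):

import torch
import torchvision.models as models

resnet = models.resnet50(pretrained=True)
model = convNet(resnet, customHead())       # customHead: the net from the first post
out = model(torch.randn(1, 3, 224, 224))    # -> torch.Size([1, 1, 14, 14])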

Is it okay if I just write self.resnet.layer1(x) or do I have to write everything for each conv layer in layer1?
And how can I set requires_grad=False for layer1 and layer2, and requires_grad=True for layer3?

you can set it to False on the parameters, e.g. p.requires_grad = False for every p in self.resnet.layer1.parameters() (try it out). Note that setting the attribute on the module itself, as in self.resnet.layer1.requires_grad = False, does not freeze its parameters.
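
Concretely, a sketch of that fine-tuning setup (model is the combined net from above; the optimizer choice is just an example):

import torch.optim as optim

# freeze layer1 and layer2 (the stem conv1/bn1 could be frozen the same way)
for p in model.resnet.layer1.parameters():
    p.requires_grad = False
for p in model.resnet.layer2.parameters():
    p.requires_grad = False

# layer3 and the new head stay trainable; pass only trainable params to the optimizer
trainable = filter(lambda p: p.requires_grad, model.parameters())
optimizer = optim.SGD(trainable, lr=1e-3, momentum=0.9)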

Hi Soumith,

In case I want activations from a certain intermediate layer of my model, should I just rewrite the forward function, or is there maybe a more straightforward way to achieve the same? Thanks in anticipation.
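
For reference, a forward hook is one way to grab intermediate activations without touching forward (a minimal sketch; the layer choice and names are illustrative):

import torch
import torchvision.models as models

activations = {}

def save_output(name):
    def hook(module, inputs, output):
        activations[name] = output.detach()
    return hook

resnet = models.resnet50(pretrained=True)
resnet.layer3.register_forward_hook(save_output('layer3'))
_ = resnet(torch.randn(1, 3, 224, 224))
print(activations['layer3'].shape)   # torch.Size([1, 1024, 14, 14])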