I am using resnet50
as a pretrained model.
Now in resnet50 we have one fc
layer and layer4
so I want to remove both the layers completely and feed the output of the previous layer to my new net:
class convNet(nn.Module):
    """Convolutional head intended to consume a 2048-channel feature map
    (e.g. the output of a ResNet-50 truncated before ``layer4`` — TODO
    confirm the channel count of the chosen cut point) and reduce it to a
    single-channel map of the same spatial size.
    """

    def __init__(self):
        super(convNet, self).__init__()
        # Channel-reduction stack: 2048 -> 1024 -> 512 -> 256.
        self.conv1 = nn.Conv2d(2048, 1024, kernel_size=3, stride=1, padding=1)
        self.conv2 = nn.Conv2d(1024, 512, kernel_size=3, stride=1, padding=1)
        self.conv3 = nn.Conv2d(512, 256, kernel_size=3, stride=1, padding=1)
        # Three parallel branches with differently shaped kernels; the
        # paddings are chosen so every branch preserves the spatial size.
        self.pconv1 = nn.Conv2d(256, 256, kernel_size=(3, 3), stride=1, padding=(1, 1))
        self.pconv2 = nn.Conv2d(256, 256, kernel_size=(3, 7), stride=1, padding=(1, 3))
        self.pconv3 = nn.Conv2d(256, 256, kernel_size=(7, 3), stride=1, padding=(3, 1))
        # Final reduction: 256 -> 64 -> 1.
        self.conv4 = nn.Conv2d(256, 64, kernel_size=3, stride=1, padding=1)
        self.conv5 = nn.Conv2d(64, 1, kernel_size=3, stride=1, padding=1)

    def forward(self, x):
        """Map a ``(N, 2048, H, W)`` tensor to a ``(N, 1, H, W)`` tensor."""
        out = nnFunctions.relu(self.conv1(x))
        out = nnFunctions.relu(self.conv2(out))
        out = nnFunctions.relu(self.conv3(out))
        # Sum the three parallel branches, then apply a shared activation.
        branches = self.pconv1(out) + self.pconv2(out) + self.pconv3(out)
        out = nnFunctions.relu(branches)
        out = nnFunctions.relu(self.conv4(out))
        return nnFunctions.relu(self.conv5(out))
How can I remove the fc
and layer4
?
How can I add the above network to the pretrained resnet50
and I also want to use fine-tuning, so I want to set requires_grad=True
for layer3
i.e. the last remaining layer after removing fc
and layer4
, how can I do the same