Hi, I'm trying to append a transpose-convolution (deconv) layer to a ResNet-50 encoder, but the kernel dies every time I build the model.
import torch
import torch.nn as nn
import torch.nn.functional as F
from torchvision.models import resnet50

class UnetBlock(nn.Module):
    def __init__(self, up_in, x_in, n_out):
        super().__init__()
        x_out = 128
        up_out = 128
        self.x_conv = nn.Conv2d(x_in, x_out, 1)                        # 1x1 conv on the skip features
        self.tr_conv = nn.ConvTranspose2d(up_in, up_out, 2, stride=2)  # upsample the deep features
        self.bn = nn.BatchNorm2d(x_out + up_out)

    def forward(self, up_p, x_p):
        up_p = self.tr_conv(up_p)
        x_p = self.x_conv(x_p)
        cat_p = torch.cat([up_p, x_p], dim=1)                          # concatenate along channels
        return self.bn(F.relu(cat_p))
class Resnet4Channel(nn.Module):
    def __init__(self, encoder_depth=34, pretrained=True, num_classes=28):
        super().__init__()
        encoder = resnet50(pretrained=pretrained)
        w = encoder.conv1.weight
        # 4-channel stem: reuse the pretrained RGB weights and build the 4th
        # channel as the mean of channels 0 and 2
        self.conv1 = nn.Conv2d(4, 64, kernel_size=7, stride=2, padding=3, bias=False)
        self.conv1.weight = torch.nn.Parameter(torch.cat((w, 0.5 * (w[:, :1, :, :] + w[:, 2:, :, :])), dim=1))
        self.bn1 = encoder.bn1
        self.relu = nn.ReLU(inplace=True)
        self.maxpool = nn.MaxPool2d(kernel_size=3, stride=2, padding=1)
        self.layer1 = encoder.layer1
        self.layer2 = encoder.layer2
        self.layer3 = encoder.layer3
        self.layer4 = encoder.layer4
        self.avgpool = encoder.avgpool
        #self.x_unet = UnetBlock(512, 256, 256)  # unet
        #self.l = nn.Linear(256, 512)
        self.fc = nn.Linear(512 * (1 if encoder_depth == 34 else 4), num_classes)
    class SaveFeatures():
        features = None
        def __init__(self, m):
            self.hook = m.register_forward_hook(self.hook_fn)
        def hook_fn(self, module, input, output):
            self.features = output
        def remove(self): self.hook.remove()
    def forward(self, x):
        self.new_classifier = nn.Sequential(*list(encoder.children())[:-1])  # check
        self.sfs = [SaveFeatures(self.new_classifier[4])]                    # check
        x = self.conv1(x)
        x = self.bn1(x)
        x = self.relu(x)
        x = self.maxpool(x)
        x = self.layer1(x)
        x = self.layer2(x)
        x = self.layer3(x)
        x = self.layer4(x)
        x = self.avgpool(x)
        x = self.x_unet(x, self.sfs[0].features)  # check
        x = x.view(x.size(0), -1)
        x = self.fc(x)
        return x
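In case it helps to reproduce, this is roughly how I build and call the model; the batch size of 2 and the 256x256 input below are just placeholders, not my real data shape.

# Minimal repro sketch (dummy batch size and image size are placeholders)
model = Resnet4Channel(encoder_depth=50, pretrained=True, num_classes=28)
dummy = torch.randn(2, 4, 256, 256)   # 4-channel input, as expected by conv1
out = model(dummy)                    # forward pass on the dummy batch
print(out.shape)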
Please let me know where I am wrong…
I chose index 4 because, when I print out ResNet-50, that layer gives an output of 256 activations; concatenated with the deconv output this gives a final output of 512, which is what goes into the classification layer…
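For context, a quick way to double-check that index and channel count is to list the children of a plain torchvision resnet50 and hook the child at index 4 (the 224x224 dummy input below is arbitrary, and weights don't matter for a shape check):

import torch
from torchvision.models import resnet50

enc = resnet50()
for i, child in enumerate(enc.children()):
    print(i, type(child).__name__)        # index 4 comes out as layer1

# hook the child at index 4 and inspect how many channels it outputs
feats = {}
def grab(module, inp, out):
    feats['x'] = out
h = list(enc.children())[4].register_forward_hook(grab)
_ = enc(torch.randn(1, 3, 224, 224))      # arbitrary dummy image
print(feats['x'].shape)                   # expect torch.Size([1, 256, 56, 56])
h.remove()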