@ptrblck, here is the code:
import torch
import torch.nn as nn
import torchvision.models as models
from collections import OrderedDict

class src_model1(nn.Module):
    def __init__(self):
        super(src_model1, self).__init__()
        resnet = models.resnet18(pretrained=True)
        layers = list(resnet.children())
        # layers[:4] == [conv1, bn1, relu, maxpool] for resnet18
        self.features = nn.Sequential(*layers[:4])

    def forward(self, x):
        return self.features(x)
class dest_model1(nn.Module):
    def __init__(self):
        super(dest_model1, self).__init__()
        resnet = models.resnet18(pretrained=True)
        layers = list(resnet.children())
        self.features = nn.Sequential(*layers[:4])

    def forward(self, x):
        return self.features(x)
device = torch.device('cuda')  # the .data copies below assume a CUDA device

src_model = src_model1()
# note: this replaces the instance created above; assumes the checkpoint stores the full module, not just a state_dict
src_model = torch.load('src_model.pth')
dest_model = dest_model1()
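# (A minimal aside, assuming a deterministic comparison is wanted: resnet18
# contains BatchNorm layers, so eval mode makes both models normalize with
# their fixed running statistics instead of per-batch statistics.)
src_model.eval()
dest_model.eval()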
# features[0] is the first conv layer, features[1] the first BatchNorm layer
dest_model.features[0].weight.data = src_model.features[0].weight.data.to(device).type(torch.cuda.FloatTensor)
dest_model.features[1].weight.data = src_model.features[1].weight.data.to(device).type(torch.cuda.FloatTensor)
dest_model.features[1].bias.data = src_model.features[1].bias.data.to(device).type(torch.cuda.FloatTensor)
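# (Alternative sketch: loading the sub-module's state_dict copies everything at
# once, including the BatchNorm running statistics (running_mean / running_var)
# that the manual .data assignments above leave out. This assumes both feature
# extractors have the identical architecture, which they do here.)
dest_model.features.load_state_dict(src_model.features.state_dict())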
activation = OrderedDict()
def get_activation(name):
    def hook(model, input, output):
        activation[name] = output.detach()
    return hook
for name, layer in src_model.named_modules():
    layer.register_forward_hook(get_activation(name))
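# Aside: register_forward_hook returns a RemovableHandle, so keeping the
# handles allows detaching the hooks later (useful when re-running this in
# one session, otherwise hooks accumulate). A sketch:
#   handles = [m.register_forward_hook(get_activation(n))
#              for n, m in src_model.named_modules()]
#   for h in handles:
#       h.remove()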
# samples: the single input sample (e.g. a [1, 3, 224, 224] tensor on `device`)
output = src_model(samples)  # calling the module itself (not .forward) so hooks on the root module also fire
activation1 = OrderedDict()
def get_activation1(name):
    def hook1(model, input, output):
        activation1[name] = output.detach()
    return hook1
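# (Aside: the two hook factories differ only in the dict they write to; a single
# hypothetical factory such as make_hook below would avoid the duplication.)
def make_hook(store, name):
    def hook(module, input, output):
        store[name] = output.detach()
    return hook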
for name, layer in dest_model.named_modules():
    layer.register_forward_hook(get_activation1(name))

output1 = dest_model(samples)  # the same single sample given to src_model
mae = nn.L1Loss()
for key, key1 in zip(activation, activation1):
    print(key, key1)
    print(torch.equal(activation[key], activation1[key1]), mae(activation[key], activation1[key1]))
    print(activation[key].shape, activation1[key1].shape)
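# A tolerance-based check can be more informative here than torch.equal, which
# demands bit-exact equality; a minimal sketch using torch.allclose:
for key, key1 in zip(activation, activation1):
    print(key, torch.allclose(activation[key], activation1[key1], atol=1e-6))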
This is the flow I used.
Thanks!