How can I add a hook to more than one layer?

# Feature-map stores: teacher outputs collect in t_FM, student outputs in s_FM.
t_FM = []
s_FM = []
# RemovableHandle objects returned by register_forward_hook, kept so every
# hook can be detached after the forward pass.
hooks = []


def make_capture_hook(store):
    """Return a forward hook that appends each module output to *store*.

    The original code defined two near-identical hooks (hook1/hook2) that
    differed only in which list they appended to; this factory removes the
    duplication while producing hooks with the same behavior.

    Parameters
    ----------
    store : list
        Destination list; the hook appends the module's ``output`` to it
        on every forward call.
    """
    def hook(module, input, output):
        print("hooker Working")
        store.append(output)
    return hook


# One capture hook per model; keep the handles so the hooks can be removed.
hooks.append(t_model.tbackbone.layer2[1].conv1.register_forward_hook(make_capture_hook(t_FM)))
hooks.append(s_model.sbackbone.layer2[1].conv1.register_forward_hook(make_capture_hook(s_FM)))

your_model(input_tensors)  # forward with firing all the hooks registered
for hook_handle in hooks:
    hook_handle.remove()

Hello again :grinning:.

Try attaching the hook to whichever module you want, after that module has been registered in t_model's __init__.

1 Like

Or, you can use the nn.Module.apply method:

with torch.no_grad():
    def add_hook(module):
        """Register a feature-capture hook on *module* if it is one of the
        two target conv layers; called once per submodule by apply()."""
        # (target module, destination list) pairs — identity comparison,
        # exactly as the original per-hook `is` checks did.
        targets = (
            (t_model.tbackbone.layer2[1].conv1, t_FM),
            (s_model.sbackbone.layer2[1].conv1, s_FM),
        )
        for target, store in targets:
            if module is target:
                # Bind `store` via a default argument so each registered
                # hook appends to its own list.
                def capture(mod, inp, out, _store=store):
                    print("hooker Working")
                    _store.append(out)

                hooks.append(module.register_forward_hook(capture))

    your_model.eval()
    your_model.apply(add_hook)
    your_model(input_tensors)  # forward with firing all the hooks registered
    for hook_handle in hooks:
        hook_handle.remove()
1 Like

Thank you very much for your time. :pray: