How to run a backward in a backward hook

import torch
import torch.nn as nn

def hook_out(module, grad_in, grad_out):
    print("backward hook out")

    def hook_in(module, grad_in, grad_out):
        print("backward hook in")

    # Run a second, independent forward/backward from inside the hook.
    model_in = nn.Conv2d(3, 3, 1, 1)
    model_in.register_backward_hook(hook_in)
    input_in = torch.randn([1, 3, 16, 16])
    model_in(input_in).sum().backward()  # this line raises the error below

model_out = nn.Conv2d(3, 3, 1, 1)
input_out = torch.randn([1, 3, 16, 16])
back_hook_out = model_out.register_backward_hook(hook_out)
model_out(input_out).sum().backward()

I get this error:

Traceback (most recent call last):
  File "regist_bh.py", line 21, in <module>
    model_out(input_out).sum().backward()
  File "/data/application/xiw/anaconda3/envs/pytorch/lib/python3.8/site-packages/torch/_tensor.py", line 363, in backward
    torch.autograd.backward(self, gradient, retain_graph, create_graph, inputs=inputs)
  File "/data/application/xiw/anaconda3/envs/pytorch/lib/python3.8/site-packages/torch/autograd/__init__.py", line 173, in backward
    Variable._execution_engine.run_backward(  # Calls into the C++ engine to run the backward pass
  File "regist_bh.py", line 16, in hook_out
    model_in(input_in).sum().backward()
  File "/data/application/xiw/anaconda3/envs/pytorch/lib/python3.8/site-packages/torch/_tensor.py", line 363, in backward
    torch.autograd.backward(self, gradient, retain_graph, create_graph, inputs=inputs)
  File "/data/application/xiw/anaconda3/envs/pytorch/lib/python3.8/site-packages/torch/autograd/__init__.py", line 173, in backward
    Variable._execution_engine.run_backward(  # Calls into the C++ engine to run the backward pass
RuntimeError: element 0 of tensors does not require grad and does not have a grad_fn

The inputs you are getting in your hook don’t require grad, because grad mode has been disabled: hooks run as part of the backward pass, and autograd turns grad mode off there by default. Your outer backward needs to run with create_graph=True for grad mode to stay enabled while the hooks execute.
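
A minimal sketch of that fix, reusing the script from the question — the only change besides the comments is the create_graph=True argument on the outer backward:

import torch
import torch.nn as nn

def hook_out(module, grad_in, grad_out):
    print("backward hook out")

    def hook_in(module, grad_in, grad_out):
        print("backward hook in")

    model_in = nn.Conv2d(3, 3, 1, 1)
    model_in.register_backward_hook(hook_in)
    input_in = torch.randn([1, 3, 16, 16])
    # Grad mode is still on here because the outer backward was
    # started with create_graph=True, so this output has a grad_fn
    # and the inner backward can run.
    model_in(input_in).sum().backward()

model_out = nn.Conv2d(3, 3, 1, 1)
input_out = torch.randn([1, 3, 16, 16])
back_hook_out = model_out.register_backward_hook(hook_out)
# create_graph=True keeps grad mode enabled while the backward
# pass (and therefore hook_out) runs.
model_out(input_out).sum().backward(create_graph=True)

Keep in mind that create_graph=True also records the graph of the outer backward itself, which costs extra memory, so only use it when you actually need to run autograd inside the backward pass.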