When a 3D input is combined with an in-place operation on the output tensor, the backward hook does not trigger. I fail to understand why this happens, especially since the same in-place operation with a 2D input works fine.
import torch

# Shared 4-unit linear layer used by all three repro cases below.
h = 4
layer = torch.nn.Linear(h, h)
def CaseNot3D():
    """Control case: 2D input + in-place add — the tensor hook DOES fire.

    Registers a gradient hook on the Linear output, mutates the output
    in place, then backprops. The hook prints 'Not 3D Case', showing it
    survives the in-place operation when the input is 2D.
    """
    inp = torch.randn((4, 4))
    out = layer(inp)
    # Hook attached to the pre-mutation output tensor.
    out.register_hook(lambda grad_output: print('Not 3D Case'))
    out += inp  # in-place add: bumps the tensor's autograd version
    out = out.sum()
    out.backward()
def CaseNotInplace():
    """Control case: 3D input + out-of-place add — the tensor hook DOES fire.

    Same shape as the broken case, but the addition creates a new tensor
    instead of mutating `out` in place; the hook prints 'Not Inplace Case'.
    """
    inp = torch.randn((4, 4, 4))
    out = layer(inp)
    # Hook attached to the Linear output tensor itself.
    out.register_hook(lambda grad_output: print('Not Inplace Case'))
    out = out + inp  # out-of-place: `out` is rebound, original tensor intact
    out = out.sum()
    out.backward()
def CaseBroken():
    """Failing case: 3D input + in-place add — the tensor hook does NOT fire.

    Identical to CaseNot3D except the input is 3D. After the in-place
    `out += inp`, backward() completes but the registered hook never runs.

    NOTE(review): presumably the 3D path through nn.Linear produces the
    output via a different autograd node than the 2D addmm path, and the
    in-place rebase drops the tensor's hooks — confirm against PyTorch
    autograd internals before relying on this explanation.
    """
    inp = torch.randn((4, 4, 4))
    out = layer(inp)
    # Hook attached to the pre-mutation output tensor.
    out.register_hook(lambda grad_output: print('Broken Case'))
    out += inp  # in-place add on the 3D Linear output
    out = out.sum()
    out.backward()
# Run all three cases; only the 3D + in-place combination fails to
# invoke the hook registered on the Linear output.
CaseNot3D()
# PRINTS "Not 3D Case"
CaseNotInplace()
# PRINTS "Not Inplace Case"
CaseBroken()
# DOES NOT PRINT "Broken Case"