class layer(torch.autograd.Function):
    """Custom autograd op with a hand-written backward pass.

    NOTE(review): ``backward`` only ever runs when the op is invoked via
    ``layer.apply(input, factors, bias)`` (never by calling ``forward``
    directly), at least one input has ``requires_grad=True``, and
    ``.backward()`` is called on a loss derived from the output. If the
    "enter here!" print never appears, one of those conditions is not met.
    """

    @staticmethod
    def forward(ctx, input, factors, bias=None):
        """Compute the forward pass and stash tensors needed by backward.

        Args:
            ctx: autograd context used to save tensors for backward.
            input: input tensor.
            factors: weight/factor tensor(s) of the op.
            bias: optional bias tensor.

        Returns:
            The op's output tensor.
        """
        # Saved tensors are retrieved in backward via ctx.saved_tensors.
        ctx.save_for_backward(input, factors, bias)
        print('fwd')
        # The actual computation was elided in the original post
        # ("output = ....."); identity is a stand-in so the code runs.
        # TODO: replace with the real forward computation.
        output = input
        return output

    @staticmethod
    def backward(ctx, grad_output):
        """Compute gradients w.r.t. input, factors, and bias.

        Args:
            ctx: autograd context holding the tensors saved in forward.
            grad_output: gradient of the loss w.r.t. this op's output.

        Returns:
            Tuple of (grad_input, grad_factors, grad_bias), one entry per
            forward argument; entries are None where no grad is needed.
        """
        # BUG FIX: the original referenced bare names `factors`, `input`,
        # and `bias`, which do not exist in this scope — tensors saved in
        # forward must be unpacked from ctx.saved_tensors. As written the
        # original would raise NameError as soon as backward was called.
        input, factors, bias = ctx.saved_tensors
        grad_input = grad_weight = grad_bias = None
        print('enter here!')
        # `bwd` is defined elsewhere in the project; assumed to return
        # (grads_for_factors, grad_for_input) — TODO confirm.
        grads, dx = bwd(factors, grad_output, input)
        if ctx.needs_input_grad[0]:
            grad_input = dx
        if ctx.needs_input_grad[1]:
            grad_weight = grads
        if bias is not None and ctx.needs_input_grad[2]:
            # Bias gradient: sum grad_output over the batch dimension.
            grad_bias = grad_output.sum(0)
        return grad_input, grad_weight, grad_bias
As shown above, the print statement in `backward` never executes, while the one in `forward` does. Why is that?
Thank you!