The code snippet is shown below.
import torch
from torch.autograd.function import Function
class MyCalc(Function):
    """Custom autograd op computing f(x) = x^2 + 2x with a correct backward pass.

    The original backward returned ``grad_output`` unchanged, which reports a
    gradient of 1 regardless of the input. By the chain rule the true gradient
    is ``grad_output * (2x + 2)``, so the input must be saved in ``forward``.
    """

    @staticmethod
    def forward(ctx, x):
        # Save the input via the supported mechanism so backward can use it.
        # (Storing the *result* on ctx, as before, was never read.)
        ctx.save_for_backward(x)
        return x * x + 2 * x

    @staticmethod
    def backward(ctx, grad_output):
        # grad_output is dL/d(output). When .backward() is called on a
        # one-element output with no argument, autograd supplies ones,
        # which is why it prints as 1 in the demo.
        print(grad_output)
        (x,) = ctx.saved_tensors
        # Chain rule: d(x^2 + 2x)/dx = 2x + 2.
        return grad_output * (2 * x + 2)
# Demo: run the custom op on a single sample and inspect the gradient flow.
inp = torch.tensor([2.0], requires_grad=True)
out = MyCalc.apply(inp)
print(out)
# A one-element output gets an implicit gradient of ones, so backward()
# needs no explicit grad argument here.
out.backward()
print(inp.grad)
When I run it, the `print(grad_output)` call inside `backward` outputs a value of 1, and I want to know what that value means.