Hello, I use the backward to solve the grad, the demo code is as follows:
import torch
def angle2matrix(angles, device=torch.device('cpu')):
angles = angles/180*3.1415926
x = angles[0]
y = angles[1]
z = angles[2]
# x
Rx = torch.tensor([[1, 0, 0],
[0, torch.cos(x), -torch.sin(x)],
[0, torch.sin(x), torch.cos(x)]]).to(device)
# y
Ry = torch.tensor([[torch.cos(y), 0, torch.sin(y)],
[0, 1, 0],
[-torch.sin(y), 0, torch.cos(y)]]).to(device)
# z
Rz = torch.tensor([[torch.cos(z), -torch.sin(z), 0],
[torch.sin(z), torch.cos(z), 0],
[0, 0, 1]]).to(device)
R = Rz.mm(Ry.mm(Rx))
return R
x=torch.ones(3,requires_grad=True)
y=torch.pow(angle2matrix(x),2)
y.sum().backward()
print(x.grad)
then I got the error as follows:
Traceback (most recent call last):
File "app/transform.py", line 110, in <module>
y.sum().backward()
File "/root/anaconda3/lib/python3.7/site-packages/torch/tensor.py", line 198, in backward
torch.autograd.backward(self, gradient, retain_graph, create_graph)
File "/root/anaconda3/lib/python3.7/site-packages/torch/autograd/__init__.py", line 100, in backward
allow_unreachable=True) # allow_unreachable flag
RuntimeError: element 0 of tensors does not require grad and does not have a grad_fn
then I change the angle2matrix
as follows:
def angle2matrix(angles, device=torch.device('cpu')):
angles = angles/180*3.1415926
x = angles[0]
y = angles[1]
z = angles[2]
# x
Rx = torch.tensor([[1, 0, 0],
[0, torch.cos(x), -torch.sin(x)],
[0, torch.sin(x), torch.cos(x)]]).to(device)
# y
Ry = torch.tensor([[torch.cos(y), 0, torch.sin(y)],
[0, 1, 0],
[-torch.sin(y), 0, torch.cos(y)]]).to(device)
# z
Rz = torch.tensor([[torch.cos(z), -torch.sin(z), 0],
[torch.sin(z), torch.cos(z), 0],
[0, 0, 1]]).to(device)
Rx.requires_grad=True
Ry.requires_grad=True
Rz.requires_grad=True
R = Rz.mm(Ry.mm(Rx))
return R
x=torch.ones(3,requires_grad=True)
y=torch.pow(angle2matrix(x),2)
y.sum().backward()
print(x.grad)
but this time I the grad of x
is none.
looking forward to your help !