Hi,

I am trying some experiments on autograd and found this following small example gives a RuntimeError: One of the differentiated Tensors given as ‘inputs’ to backward is not a leaf Tensor.

However, printing `x.is_leaf`

shows `True`

. So why is this error thrown?

```
import torch
import torch.nn as nn
import torch.nn.functional as F
class TestModule(nn.Module):
def __init__(self, input_size, output_size):
super().__init__()
internal_size = 5
self.linear1 = nn.Linear(input_size, internal_size)
self.linear2 = nn.Linear(internal_size, output_size)
self.model = nn.Sequential(self.linear1, nn.ReLU(), self.linear2)
def forward(self, x):
x = self.model(x)
return x
model = TestModule(4, 3)
x, y = torch.rand(size=(20, 4)), torch.randint(low=0, high=3, size=(20,))
x.requires_grad = True
loss = F.cross_entropy(model(x), y)
print(x.is_leaf)
torch.autograd.backward(loss, create_graph=False, inputs=x)
```