I am getting grad value of None for the following two variables after backward pass. I understand that they may not be leaf variables, so I called retain_grad() on them before calling backward but it did not help.
ranked_tensor[idx] # Variable containing: [torch.cuda.FloatTensor of size 1 (GPU 0)]
ranked_tensor[nonzeroidx.data[0][0]]. # Variable containing: [torch.cuda.FloatTensor of size 1 (GPU 0)]
for idx in range(nonzeroidx.data[0][0]-1, -1, -1):
logistic = self.computelogistic(
ranked_tensor[nonzeroidx.data[0][0]],
ranked_tensor[idx])
lambdaij = 1.0/(idx+1) - 1.0/(nonzeroidx.data[0][0]+1)
logistic *= lambdaij
ranked_tensor[nonzeroidx.data[0][0]].retain_grad()
h = ranked_tensor[nonzeroidx.data[0][0]].register_hook(lambda grad:
grad * logistic)
ranked_tensor[nonzeroidx.data[0][0]].backward(
retain_graph=True)
h.remove()
ranked_tensor[idx].retain_grad() #ranked_tensor[idx].require_grad is True
h = ranked_tensor[idx].register_hook(lambda grad:
grad * -logistic)
ranked_tensor[idx].backward(retain_graph=True) #ranked_tensor[idx].grad is None!!
h.remove()