How can i solve this error?
RuntimeError: One of the differentiated Tensors appears to not have been used in the graph. Set allow_unused=True if this is the desired behavior.
srun: error: cn31: task 0: Exited with exit code 1
for batch_idx, (x_in,y_in) in enumerate(dataloader):
net2_u.zero_grad()
net2_v.zero_grad()
net2_p.zero_grad()
loss_eqn = criterion(x_in,y_in)
loss_bc = Loss_BC(xb,yb,ub,vb,x,y)
loss = loss_eqn + Lambda_BC* loss_bc
# loss.backward(create_graph=True)
loss.backward()
optimizer_u.step()
optimizer_v.step()
optimizer_p.step()
loss_eqn_a =loss_eqn.detach().cpu().numpy()
loss_eqn_n += loss_eqn_a
loss_bc_a= loss_bc.detach().cpu().numpy()
loss_bc_n += loss_bc_a
n += 1
params, gradsH = get_params_grad(net2_u) #get model parameters and corresponding gradients
params[2].requires_grad = True
gradsH[2].requires_grad = True
tmp_num_data = x_in.size(0)
v = [torch.randn(p.size()).to(device) for p in params] # generate random vector
v = normalization(v) # normalize the vector
THv = [torch.zeros(p.size()) for p in params[2]]
Hv = torch.autograd.grad(gradsH[2],params[2],grad_outputs=torch.ones_like(params[2]), retain_graph = True ,only_inputs=True)[0]
If i set allow_unused=True it prints none.