I have defined a custom loss function that depends on the first and second derivatives of the output of the NN with respect to the input.
```python
def PDE(x, y):
    first_derivative = torch.autograd.grad(y, x,
                                           grad_outputs=y.data.new(y.shape).fill_(1),
                                           create_graph=True,
                                           retain_graph=True)
    print("dydx: \n", first_derivative)  # We now have dy/dx

    second_derivative = torch.autograd.grad(first_derivative, x,
                                            grad_outputs=first_derivative.data.new(first_derivative.shape).fill_(1),
                                            create_graph=True,
                                            retain_graph=True)
    print("d2ydx2: \n", second_derivative)  # This computes d/dx(dy/dx) = d2y/dx2

    eq = 4 * first_derivative[:, 1] - second_derivative[:, 0]
    loss = torch.mean(eq ** 2)
    return loss
```
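For reference, the pattern I'm following is to call `torch.autograd.grad` twice with `create_graph=True`. On a toy function it does what I expect (this snippet is just an illustration I wrote to check my understanding of the API; it is not part of the failing code):

```python
import torch

# Toy check of double differentiation (illustration only).
# y = x0^2 + 3*x1, so dy/dx = [2*x0, 3] and d2y/dx0^2 = 2.
x = torch.tensor([[1.0, 2.0], [3.0, 4.0]], requires_grad=True)
y = (x[:, 0] ** 2 + 3 * x[:, 1]).unsqueeze(1)

dy_dx, = torch.autograd.grad(y, x,
                             grad_outputs=torch.ones_like(y),
                             create_graph=True)
d2y_dx2, = torch.autograd.grad(dy_dx, x,
                               grad_outputs=torch.ones_like(dy_dx),
                               create_graph=True)

print(dy_dx)    # values [[2., 3.], [6., 3.]]
print(d2y_dx2)  # values [[2., 0.], [2., 0.]] -> column 0 is d2y/dx0^2
```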
When I run `loss.backward()`, I get the following error:
RuntimeError: leaf variable has been moved into the graph interior
I can’t understand why this is happening.
Here’s my complete code:
```python
import torch
import torch.nn as nn

# Define the NN model to solve the problem
class Model(nn.Module):
    def __init__(self):
        super(Model, self).__init__()
        self.lin1 = nn.Linear(2, 10)
        self.lin2 = nn.Linear(10, 1)

    def forward(self, x):
        x = torch.sigmoid(self.lin1(x))
        x = self.lin2(x)
        return x

model = Model()
y = model(X_train)
loss = PDE(X_train, y)
loss.backward()
```
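`X_train` isn't shown above: it's my training input, an (N, 2) float tensor (the first `Linear` layer expects 2 features), and as far as I understand it needs `requires_grad=True` so that `PDE()` can differentiate the output with respect to it. A purely illustrative stand-in of the right shape (not my real data, so it may not reproduce the error in exactly the same way) would be:

```python
import torch

# Hypothetical stand-in for the real training inputs: N sample points in 2-D.
# requires_grad=True so the network output can be differentiated w.r.t. the input.
X_train = torch.rand(100, 2, requires_grad=True)
```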