RuntimeError: Trying to backward through the graph a second time. Specify retain_graph=True if you need to backward through the graph a second time or if you need to access saved tensors after calling backward

Here is the forward code. I'm calculating a custom loss function in the Model class and returning the loss value. Which in-place operations am I doing that are causing this error, and where can I fix them?

import torch
import torch.nn as nn

# assumed: defined elsewhere in the original script
device = torch.device("cuda" if torch.cuda.is_available() else "cpu")


class MatrixModel(nn.Module):
  def __init__(self, num_matrices=10, layers=10, img_shape=(32, 32), lamda=1.0) -> None:
    super().__init__()
    self.l = lamda
    modules = []
    # bank of square matrices that forward() refines and stores back
    self.W = torch.randn((num_matrices, img_shape[1], img_shape[1]))
    for i in range(layers):
      modules.append(nn.Conv2d(num_matrices, num_matrices, kernel_size=3, stride=1, padding=1))
    self.model = nn.Sequential(*modules)
    for name, module in self.model.named_children():
      # module.register_full_backward_hook(backward_hook_fn)
      # replace NaNs in every layer's output with zeros
      module.register_forward_hook(lambda m, input, output: torch.nan_to_num(output, nan=0.0))


  def calculate_loss(self, X, X_w, W, lamda=1.0):
    # data term: 0.5 * sum of squared residuals between X and X_w
    X_w = X - X_w
    X_w = torch.einsum("ijk,ijk->ijk", X_w, X_w)
    X_w = torch.nan_to_num(X_w)
    X_w = torch.einsum("ijk->", X_w)
    X_w = 0.5 * X_w.clone()

    # penalty term: lamda * sum over (j, k) of the L2 norm of W along its first dim
    W = torch.einsum("ijk,ijk->ijk", W, W)
    W = torch.einsum("ijk->jk", W)
    W = torch.sqrt(W + 1e-6)
    W = torch.einsum("jk->", W)
    W = lamda * W.clone()
    return W + X_w

  def forward(self, x):
    W = self.W.clone()
    W = torch.unsqueeze(W, 0)          # add a batch dimension for the conv stack
    W = torch.nan_to_num(W, nan=0.0)
    W = W.to(device)

    W = self.model(W)                  # refine the matrices with the conv layers

    x = torch.mean(x, dim=1)           # reduce x along dim 1
    x_expanded = x.unsqueeze(1)

    X_w = torch.matmul(x_expanded, W)
    X_w = torch.nan_to_num(X_w)
    X_w = torch.sum(X_w, dim=1)

    X_w = torch.nan_to_num(X_w)
    W = torch.squeeze(W)
    loss = self.calculate_loss(x, X_w, W, self.l)
    self.W = W.clone()                 # store the refined matrices for the next call
    return loss

It seems you are assigning the model output back to self.W and then reusing it on the next forward pass. Because clone() keeps the autograd history, self.W still references the computation graph of the previous iteration, and that graph has already been freed by the previous backward() call, which is exactly what the error is complaining about. You likely need to .detach() the tensor before assigning it instead of only cloning it.
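
A minimal sketch of the change, assuming self.W is only meant to carry the refined matrices over to the next call and is not itself a trainable parameter (only the last lines of forward are shown; everything above them stays as in your code):

    loss = self.calculate_loss(x, X_w, W, self.l)
    # detach() cuts W off from this iteration's autograd graph, so storing it on
    # self.W no longer keeps that graph alive into the next forward/backward pass
    self.W = W.detach().clone()
    return loss

The clone() after detach() is optional here: detach() alone already breaks the link to the old graph, the extra clone just avoids sharing storage with the conv output.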