Differentiable Optimizer Not Working For Simple Example

import torch
import torch.nn as nn

class Net(torch.nn.Module):

    def __init__(self):
        super().__init__()
        self.device = "cpu"
        self.Initialize()

    def Initialize(self):
        self.FC = nn.Sequential(
            nn.LayerNorm(10),
            nn.Linear(10, 5),
        ).to(self.device)

    def Inference(self, s):
        ou = self.FC(s)
        return ou

    def forward(self, s):
        return self.Inference(s)

model = Net()
model.train()

# differentiable=True asks the optimizer to make the update step itself differentiable
optimizer = torch.optim.Adam(model.parameters(), lr=0.001, differentiable=True)

z = torch.randn((10, 10))  # batch of 10 inputs with 10 features

a = model(z)

loss = (a - 1).mean()

optimizer.zero_grad()
loss.backward()
optimizer.step()  # this call raises the RuntimeError shown below

The code above is the complete, minimal example.

Error produced:
File "…\PythonSoftwareFoundation.Python.3.9_qbz5n2kfra8p0\LocalCache\local-packages\Python39\site-packages\torch\optim\adam.py", line 413, in single_tensor_adam
    param.addcdiv_(exp_avg, denom)
RuntimeError: a leaf Variable that requires grad is being used in an in-place operation.
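
If it helps, the message itself appears to be the generic autograd complaint about in-place updates on leaf tensors; it can be reproduced in isolation (the tensor p below is just for illustration, not part of my code):

import torch

p = torch.randn(5, requires_grad=True)  # a leaf tensor that requires grad, like a model parameter
p.add_(1.0)  # in-place op on the leaf -> same "leaf Variable ... in-place operation" RuntimeError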

PyTorch 2.4
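
For comparison, the same forward/backward/step with the default Adam (differentiable left at its default of False) runs without error here, which suggests the failure is specific to the differentiable=True flag:

# identical setup, but with the default (non-differentiable) optimizer
optimizer = torch.optim.Adam(model.parameters(), lr=0.001)

z = torch.randn((10, 10))
a = model(z)
loss = (a - 1).mean()

optimizer.zero_grad()
loss.backward()
optimizer.step()  # completes without the RuntimeError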