Error in multi-input multimodal autoencoder

I followed the code for a multi-input multimodal autoencoder written by @ptrblck in the post “An autoencoder with multiple inputs”. I get an error when I write a training loop to reconstruct my multiple inputs. @ptrblck, could you please help me figure out how the training loop should be written to reconstruct the multiple inputs in the code below?

import torch
import torch.nn as nn
class MyModel(nn.Module):
    def __init__(self):
        super().__init__()
        # one small per-modality encoder for each of the 9 inputs
        self.encoders = nn.ModuleList()
        for _ in range(9):
            self.encoders.append(nn.Linear(4, 3))

        # shared encoder: concatenated per-modality features -> latent code
        self.encoder = nn.Sequential(
            nn.Linear(9 * 3, 4),
            nn.ReLU(),
            nn.Linear(4, 3)
        )
        # shared decoder: latent code -> concatenated per-modality features
        self.decoder = nn.Sequential(
            nn.Linear(3, 4),
            nn.ReLU(),
            nn.Linear(4, 9 * 3)
        )
        # one small per-modality decoder for each of the 9 outputs
        self.decoders = nn.ModuleList()
        for _ in range(9):
            self.decoders.append(nn.Linear(3, 4))
    
    def forward(self, inputs):
        # encode each input with its own encoder, then concatenate along the feature dim
        out = []
        for idx, enc in enumerate(self.encoders):
            out.append(enc(inputs[idx]))
        out = torch.cat(out, dim=1)
        print("out", out)
        z = self.encoder(out)
        out = self.decoder(z)
        # split the shared decoder output back into 9 chunks of 3 features each
        out = torch.split(out, 3, dim=1)
        outs = []
        for idx, dec in enumerate(self.decoders):
            outs.append(dec(out[idx]))
        return outs

model = MyModel()
inputs = [torch.randn(1, 4) for _ in range(9)]
outs = model(inputs)
loss_fn = nn.MSELoss()

# Create the optimizer

optimizer = torch.optim.Adam(params=model.parameters(), lr=0.01)

torch.manual_seed(42)

epochs = 2

train_loss_values = []
test_loss_values = []
epoch_count = []

for epoch in range(epochs):
    ### Training
    model.train()
    inp = inputs
    y_pred = model(inp)
    for idx, y_pr in enumerate(y_pred):
        loss = loss_fn(y_pr, inp[idx])
        # print(loss)
        optimizer.zero_grad()
        loss.backward()
        optimizer.step()
        train_loss_values.append(loss.item())

Error:
line 173, in backward
Variable._execution_engine.run_backward( # Calls into the C++ engine to run the backward pass
RuntimeError: Trying to backward through the graph a second time (or directly access saved tensors after they have already been freed). Saved intermediate values of the graph are freed when you call .backward() or autograd.grad(). Specify retain_graph=True if you need to backward through the graph a second time or if you need to access saved tensors after calling backward.
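For what it's worth, my current understanding of the error: all nine outputs come from a single forward pass, so they share one autograd graph, and the first loss.backward() frees the saved intermediate values of that shared graph; the backward() call for the next output then has nothing left to work with. My guess is that the per-output losses should be accumulated and backpropagated once per step. Here is a minimal sketch of the loop I have in mind (the summed loss is my own assumption, not from @ptrblck's post):

for epoch in range(epochs):
    model.train()
    y_pred = model(inputs)

    # sum the reconstruction loss over all nine modalities so that
    # backward() traverses the shared graph exactly once
    loss = sum(loss_fn(y_pr, inputs[idx]) for idx, y_pr in enumerate(y_pred))

    optimizer.zero_grad()
    loss.backward()
    optimizer.step()
    train_loss_values.append(loss.item())
    epoch_count.append(epoch)

I know loss.backward(retain_graph=True) would silence the freed-graph error, but as far as I understand it would be a workaround here: calling optimizer.step() between the per-output backward passes would also compute the later gradients against already-updated parameters. Is the summed loss above the right way to train this model?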