Reuse the output of a model

import torch
from tqdm import tqdm
from torchvision.utils import make_grid

# restomer, noise_extractor, optimizer, train_loader, writer, psnr,
# convert_to_rgb255, batch_size and device are defined elsewhere.
def train(input):
    noises = noise_extractor(input) - input
    total_loss = 0.0
    for i in range(batch_size):
        for noisys, _ in train_loader:
            noisys = noisys.to(device=device)
            optimizer.zero_grad()
            loss = restomer.loss(noisys + noises[i], noisys)
            loss.backward(retain_graph=True)
            optimizer.step()
            with torch.no_grad():
                # loss is a tensor; detach it so the graph is released and memory does not leak
                total_loss += loss.sum().detach()
            del loss
    return total_loss
def draw(mean_loss_noisy, mean_loss_clean, noisys, cleans, epoch):
    outs = restomer(noisys)
    writer.add_scalar('noisy_loss', mean_loss_noisy, epoch)
    writer.add_scalar('clean_loss', mean_loss_clean, epoch)
    writer.add_scalar('psnr', psnr(outs, cleans), epoch)
    writer.add_image('noisy', make_grid(convert_to_rgb255(noisys)), epoch)
    writer.add_image('clean', make_grid(convert_to_rgb255(cleans)), epoch)
    writer.add_image('out', make_grid(convert_to_rgb255(outs)), epoch)
def main():
    for epoch in tqdm(range(5000)):
        total_loss_noisy, total_loss_clean = 0, 0
        for noisys, cleans in train_loader:
            noisys = noisys.to(device=device)
            cleans = cleans.to(device=device)
            total_loss_noisy += train(noisys)
            with torch.no_grad():
                total_loss_clean += restomer.loss(noisys, cleans).sum().detach()
        with torch.no_grad():
            draw(total_loss_noisy, total_loss_clean, noisys, cleans, epoch)

main()

I want to reuse the output of noise_extractor; how can I achieve this?
The code above hits the following error:
one of the variables needed for gradient computation has been modified by an inplace operation: [torch.cuda.FloatTensor [3, 64, 3, 3]] is at version 2; expected version 1 instead. Hint: the backtrace further above shows the operation that failed to compute its gradient. The variable in question was changed in there or anywhere later. Good luck!
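
The error says a weight tensor (a [3, 64, 3, 3] conv filter) was updated in place after the graph that a later backward() still needs was built. Here is a minimal standalone reproduction of the same failure mode, independent of the code above, just to illustrate the mechanism:

import torch

w = torch.nn.Parameter(torch.randn(3))
opt = torch.optim.SGD([w], lr=0.1)

y = w ** 2                           # backward of ** needs w's value, saved at version 0
y.sum().backward(retain_graph=True)
opt.step()                           # in-place weight update bumps w's version to 1
y.sum().backward()                   # RuntimeError: ... is at version 1; expected version 0

In the posted code, optimizer.step() plays the role of opt.step(): it modifies the weights in place while the graph retained through retain_graph=True (which reaches back into noise_extractor via noises) still expects their old version.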

Could you describe your use case a bit more and especially how you would like to “reuse” this tensor, please?
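
Assuming the goal is simply to cache the extracted noise, and that noise_extractor's weights are shared with or updated alongside restomer, one common fix is to detach the cached tensor so the retained graph never references weights that optimizer.step() updates in place. A minimal sketch using the names from the question, not a definitive answer:

def train(input):
    # Compute the noise once, outside autograd: the cached tensor then carries
    # no graph, so later backward() calls never revisit noise_extractor's
    # weights, and retain_graph=True becomes unnecessary.
    with torch.no_grad():
        noises = noise_extractor(input) - input
    total_loss = 0.0
    for i in range(batch_size):
        for noisys, _ in train_loader:
            noisys = noisys.to(device=device)
            optimizer.zero_grad()
            loss = restomer.loss(noisys + noises[i], noisys)
            loss.backward()          # fresh graph each iteration, freed after use
            optimizer.step()
            total_loss += loss.detach().sum()
    return total_loss

If gradients are instead supposed to flow back into noise_extractor, the noise must be recomputed inside the loop (or the optimizer step deferred), which is presumably why the reply above asks how the tensor should be "reused".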