GRU cannot update hidden_state in-place

I ran into a problem while implementing a GRU network in PyTorch.

My code is as follows:

import torch

class GRU_model(torch.nn.Module):
    def __init__(self, device):
        super(GRU_model, self).__init__()
        self.h = torch.randn((1,1,5), device=device, dtype=torch.float)  # hidden state stored as an attribute and reused across forward calls

        self.GRU_1 = torch.nn.GRU(input_size=5, hidden_size=5)

    def forward(self, a):
        output, self.h = self.GRU_1(a, self.h)  # the returned hidden state overwrites self.h and stays attached to this iteration's graph
        return output

if __name__ == '__main__':
    learn_rate=1e-4
    device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
    model = GRU_model(device).to(device=device)
    optimizer = torch.optim.Adam(model.parameters(), lr=learn_rate)

    for i in range(10):
        a = torch.randn((1, 1, 5), device=device, dtype=torch.float)
        output = model(a)
        loss = (a - output).mean()

        optimizer.zero_grad()
        loss.backward(retain_graph=True)
        optimizer.step()

and I got an error like this:

Traceback (most recent call last):
  File "C:/Users/Administrator_/Desktop/Graduation_Project/MIDI_Music_style_transfer/GRU_toy_in-place_hidden_states_change/main.py", line 40, in <module>
    loss.backward(retain_graph=True)
  File "C:\Users\Administrator_\AppData\Local\Programs\Python\Python37\lib\site-packages\torch\tensor.py", line 245, in backward
    torch.autograd.backward(self, gradient, retain_graph, create_graph, inputs=inputs)
  File "C:\Users\Administrator_\AppData\Local\Programs\Python\Python37\lib\site-packages\torch\autograd\__init__.py", line 147, in backward
    allow_unreachable=True, accumulate_grad=True)  # allow_unreachable flag
RuntimeError: one of the variables needed for gradient computation has been modified by an inplace operation: [torch.cuda.FloatTensor [15, 5]] is at version 2; expected version 1 instead. Hint: enable anomaly detection to find the operation that failed to compute its gradient, with torch.autograd.set_detect_anomaly(True).
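
For reference, the hint at the end seems to refer to enabling anomaly detection before the training loop, which as far as I understand is done like this:

torch.autograd.set_detect_anomaly(True)  # report the forward op responsible for a failing gradient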

All I want is to update the hidden state of the GRU after each epoch, but it just doesn't work!
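
For what it's worth, my current guess is that I need to keep the values of the hidden state but cut it off from the previous iteration's graph, something like the sketch below (the detach() placement is just my assumption, and I suppose retain_graph=True would then no longer be needed):

    def forward(self, a):
        output, h = self.GRU_1(a, self.h)
        self.h = h.detach()  # keep the new hidden state values, but drop the old graph (my guess)
        return output

Is that the right direction, or is there a proper way to carry a hidden state across iterations?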

I'd really appreciate it if you could help me out!