Invalid argument when adding LSTM layer

Hi everyone,

I ran into trouble when adding an LSTM layer. Before adding the LSTM my code looked like the block below, and it worked well. Here the input is 5 numbers, like [1, 2, 3, 4, 5].

import torch
import torch.nn as nn
import torch.nn.functional as F
import torch.optim as optim
import torch.autograd as autograd
from torch.autograd import Variable

class Net(nn.Module):

    def __init__(self):
        super(Net, self).__init__()
        
        self.fc1 = nn.Linear(5, 3)
        self.fc2 = nn.Linear(3, 1)


    def forward(self, x):
        x = F.relu(self.fc1(x))  # (5,) -> (3,)
        x = self.fc2(x)          # (3,) -> (1,)
        return x

net = Net()


#========================================
optimizer = optim.SGD(net.parameters(), lr=0.01)


#===================================
loss_sum1 = 0

criterion = nn.MSELoss()

# in your training loop:
for i in range(len(x_train)):

    optimizer.zero_grad()

    input = Variable(torch.FloatTensor(x_train[i]))  # x_train[i] is [1, 2, 3, 4, 5]
    output = net(input)

    y = Variable(torch.FloatTensor([y_train[i]]))  # wrap the target so it has shape (1,)

    loss = criterion(output, y)
    loss_sum1 += loss.data[0]  # accumulate the scalar value, not the graph

    loss.backward()
    optimizer.step()
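
For reference, a single pass through this working version behaves as expected (a minimal check; the shapes in the comments are my understanding):

# sanity check for the working network
input = Variable(torch.FloatTensor([1, 2, 3, 4, 5]))  # shape (5,)
output = net(input)  # fc1: (5,) -> (3,), fc2: (3,) -> (1,)
print(output.size())  # torch.Size([1])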

But when I add an LSTM layer, with input [[1,2,3,4,5],[1,2,3,4,5],[1,2,3,4,5]], the model becomes:

class Net(nn.Module):

    def __init__(self):
        super(Net, self).__init__()
        self.lstm = nn.LSTM(3, 3, 1)
        self.fc1 = nn.Linear(5, 3)
        self.fc2 = nn.Linear(3, 1)

        self.hidden = self.init_hidden()

    def init_hidden(self):
        
        return (autograd.Variable(torch.zeros(1, 1, 3)),
                autograd.Variable(torch.zeros(1, 1, 3)))

    def forward(self, x):
        # x arrives here as a 2D tensor of shape (3, 5)
        x, self.hidden = self.lstm(x, self.hidden)
        x = F.relu(self.fc1(x.view(5, -1)))
        x = self.fc2(x)
        return x

and this error appears:

RuntimeError: invalid argument 2: dimension 1 out of range of 1D tensor
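
From the nn.LSTM docs, the input should be a 3D tensor of shape (seq_len, batch, input_size), but here x is 2D with shape (3, 5), and I declared input_size=3 even though each row has 5 values. (Even if the LSTM accepted it, its output for seq_len=3, batch=1, hidden_size=3 would have 9 elements, so x.view(5, -1) could not work either.) My guess at a fix is to treat the 3 rows as 3 timesteps of 5 features each; this is just an unverified sketch:

class Net(nn.Module):

    def __init__(self):
        super(Net, self).__init__()
        self.lstm = nn.LSTM(5, 3, 1)  # input_size=5: each timestep has 5 features
        self.fc1 = nn.Linear(3, 3)    # takes the LSTM's hidden_size=3
        self.fc2 = nn.Linear(3, 1)

        self.hidden = self.init_hidden()

    def init_hidden(self):
        return (autograd.Variable(torch.zeros(1, 1, 3)),
                autograd.Variable(torch.zeros(1, 1, 3)))

    def forward(self, x):
        x = x.view(3, 1, 5)  # (seq_len=3, batch=1, input_size=5)
        x, self.hidden = self.lstm(x, self.hidden)
        x = F.relu(self.fc1(x[-1]))  # last timestep's output, shape (1, 3)
        x = self.fc2(x)              # shape (1, 1)
        return x.view(1)             # match the target's shape (1,)

I suspect I would also need to reset net.hidden = net.init_hidden() (or detach the hidden state) at the start of every training iteration, otherwise loss.backward() would try to go through the previous sample's graph.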

Is this the right direction, or how should I handle it?