Could anyone please help: how can I use multiple LSTM layers?
[NOTE: LSTM layers 1 and 2 are commented out because when I try to add them I run into a dimension mismatch.]
class LSTMnetwork(nn.Module):
    """Sequence-to-one LSTM regressor.

    To stack multiple LSTM layers you do not need separate ``nn.LSTM``
    modules: ``nn.LSTM`` accepts a ``num_layers`` argument and stacks them
    internally.  The dimension error in the commented-out manual approach
    came from feeding one shared ``self.hidden`` — shaped for
    ``hidden_size`` — into layers whose hidden dims were 200 and 300; each
    ``nn.LSTM`` requires its own ``(h_0, c_0)`` tuple shaped
    ``(num_layers, batch, its_own_hidden_size)``.

    Args:
        input_size: number of features per timestep (default 1).
        hidden_size: hidden dimension of every stacked layer (default 100).
        output_size: number of output features per prediction (default 1).
        num_layers: how many LSTM layers to stack (default 1, which
            reproduces the original single-layer behaviour exactly).
    """

    def __init__(self, input_size=1, hidden_size=100, output_size=1,
                 num_layers=1):
        super().__init__()
        self.hidden_size = hidden_size
        self.num_layers = num_layers
        # One module holding `num_layers` stacked LSTMs; inter-layer
        # dropout could be enabled with the ``dropout=`` kwarg if desired.
        self.lstm = nn.LSTM(input_size, hidden_size, num_layers=num_layers)
        self.linear = nn.Linear(hidden_size, output_size)
        # Initial (h_0, c_0) state: (num_layers, batch=1, hidden_size).
        self.hidden = (torch.zeros(self.num_layers, 1, self.hidden_size),
                       torch.zeros(self.num_layers, 1, self.hidden_size))

    def forward(self, seq):
        """Run ``seq`` through the LSTM stack and return the last prediction.

        NOTE(review): ``self.hidden`` is carried across calls and never
        detached, so during training gradients flow back through every
        previous sequence; reset (or ``detach_()``) it per sequence if
        that is not intended — confirm against the training loop.
        """
        # Reshape to (seq_len, batch=1, input_size) as nn.LSTM expects.
        lstm_out, self.hidden = self.lstm(seq.view(len(seq), 1, -1),
                                          self.hidden)
        pred = self.linear(lstm_out.view(len(seq), -1))
        return pred[-1]  # we only want the last timestep's prediction