Before posting a query, check the FAQs - it might already be answered!
Hi everyone,
I am trying to build a bidirectional LSTM encoder decoder to predict RUL for CMAPPS dataset, while executing the code i am getting this error. and i cant figure it out. I have tried batch_first=True but it doesnt work. can anyone help? Here is my code:
-- coding: utf-8 --
import torch
import torch.nn as nn
device = torch.device(‘cuda’ if torch.cuda.is_available() else ‘cpu’)
class Encoder_LSTM(nn.Module):
“”"
Class for implementing the Bidirectional LSTM Encoder Structure.
Parameters
----------
input_dim : int
No. of features in Input Data
enc_dim : int
Hidden Dimension Size for Encoder Cell.
dec_dim : int
Hidden Dimension Size for Decoder Cell.
ablation : str
Specifies the type of LSTM Encoder-Decoder architecture.
bidirectional : bool, optional
If True, implements a Bidirectional LSTM Structure. Else, a Unidirectional LSTM.
Default : True
Returns
-------
output : torch.Tensor
Encoder Hidden Representations.
hidden : torch.Tensor
Hidden State of Final Encoder LSTM Cell.
cell : torch.Tensor
Cell State of Final Encoder LSTM Cell.
"""
def __init__(self, input_dim, enc_dim,dec_dim, num_layers=2, ablation=None, bidirectional=True):
super(Encoder_LSTM, self).__init__()
self.input_dim = input_dim
self.enc_dim= enc_dim
self.dec_dim = dec_dim
self.num_layers = num_layers
# define LSTM layer
self.lstm = nn.LSTM(input_size = input_dim, hidden_size = enc_dim, num_layers=num_layers, batch_first=True)
self.fc_hidden = nn.Linear(enc_dim*2, dec_dim)
self.fc_cell = nn.Linear(enc_dim*2, dec_dim)
def forward(self, src):
output, (hidden,cell) = self.lstm(src)
hidden = self.fc_hidden(torch.cat((hidden[0:1],hidden[1:2]),dim=2))
cell = self.fc_cell(torch.cat((cell[0:1],cell[1:2]),dim=2))
return output, (hidden, cell)
class Decoder_LSTM(nn.Module):
“”"
Class for implementing a Unidirectional LSTM Decoder Cell.
Parameters
----------
output_dim : int
No. of features in Output Data from instantaneous Decoder Cell.\n
Default : 1 for RUL.
enc_dim : int
Hidden Dimension Size for Encoder Cell.
dec_dim : int
Hidden Dimension Size for Decoder Cell.
Returns
-------
final_prediction : torch.Tensor
Output from LSTM Decoder Cell.
hidden : torch.Tensor
Hidden State of instantaneous Decoder LSTM Cell.
cell : torch.Tensor
Cell State of instantaneous Decoder LSTM Cell.
"""
def __init__(self, output_dim, enc_dim, dec_dim):
super(Decoder_LSTM, self).__init__()
self.output_dim= output_dim
self.dec_dim = dec_dim
self.lstm = nn.LSTM(input_size= enc_dim*2, hidden_size = dec_dim, batch_first=True)
self.relu = nn.ReLU()
self.linear = nn.Linear(dec_dim, output_dim)
def forward(self, dec_input, hidden,cell):
output, (hidden,cell) = self.lstm(dec_input, (hidden,cell))
out = self.relu(output)
final_prediction = self.linear(out)
return final_prediction, hidden, cell