LSTMs producing same output for different batches of data

I am trying to design an autoencoder model to convert my discrete one-hot encoded data into continuous data (something like softmax values). My training data has shape [64, 10, 98], where 64 is the batch size, 10 is the sequence length and 98 is the feature size. The code for the autoencoder I have used:
import torch
import torch.nn as nn

class EncoderDecoderModel(nn.Module):
    def __init__(self, input_size, hidden_size):
        super(EncoderDecoderModel, self).__init__()

        # encoder: one-hot features -> hidden representation
        self.encoder_lstm = nn.LSTM(input_size, hidden_size, num_layers=10, batch_first=True)

        # decoder: hidden representation -> back to the input feature size
        self.decoder_lstm = nn.LSTM(hidden_size, input_size, num_layers=10, batch_first=True)

    def forward(self, input_tensor):
        # encode the whole sequence
        oru, (encoder_hidden, cell) = self.encoder_lstm(input_tensor)

        # decode the encoder outputs back to the input feature size
        decoder_output, _ = self.decoder_lstm(oru)

        return decoder_output

input_size = 98
hidden_size = 512
batch_size = 64
sequence_length = 10

model = EncoderDecoderModel(input_size, hidden_size)

# data_one_hot is the one-hot encoded batch built in the training loop below
input_tensor = data_one_hot.float()

output_tensor = model(input_tensor)
print(output_tensor.shape)
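
In case it helps to reproduce the shapes without my dataset, a random stand-in batch works as well (dummy_indices below is only a placeholder for the integer-encoded batches I actually draw from pass_next):

import torch
import torch.nn.functional as F

# random class indices in [0, 98), one-hot encoded to match the shape [64, 10, 98]
dummy_indices = torch.randint(0, input_size, (batch_size, sequence_length))
data_one_hot = F.one_hot(dummy_indices, num_classes=input_size)

output_tensor = model(data_one_hot.float())
print(output_tensor.shape)  # torch.Size([64, 10, 98])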

The problem is that it produces the same output for different batches of data in the training loop.

Training loop:
from torch.autograd import Variable
import torch.nn.functional as F
import numpy as np

iters = 200000
model.train()
for iter in range(iters):
    data = next(pass_next)
    data = data.astype(np.float32)
    data_tensor = Variable(torch.tensor(data, requires_grad=True).long())
    data_one_hot = F.one_hot(data_tensor, num_classes=len(charmap))
    output = model(data_one_hot.float())
    ae_optim.zero_grad()
    loss = F.cross_entropy(data_one_hot.float(), output)
    loss.backward()
    torch.nn.utils.clip_grad_norm_(model.parameters(), 1.0)
    ae_optim.step()

    if iter % 10 == 0 and iter > 0:
        print("********************************")
        samples = torch.argmax(output, dim=2)
        # samples = decode_passwords(samples, inv_charmap)
        # output = decode_passwords(samples, inv_charmap)
        print(" samples : ", samples)
        print("iter :", iter)
        print("loss :", loss)
        print("********************************")

Please help!