How to apply batch normalization in an LSTM?

Here is my code:

import torch
import torch.nn as nn

class LSTM(nn.Module):
    def __init__(self, **model_config):
        super(LSTM, self).__init__()

        # Initialize from pretrained vectors for glove/fasttext embeddings.
        # `== 'glove' or 'fasttext'` would always be truthy, so test with `in`.
        # TEXT is the torchtext field defined elsewhere in my script.
        if model_config['emb_type'] in ('glove', 'fasttext'):
            self.emb = nn.Embedding(model_config['vocab_size'],
                                    model_config['emb_dim'],
                                    _weight=TEXT.vocab.vectors)
        else:
            self.emb = nn.Embedding(model_config['vocab_size'],
                                    model_config['emb_dim'])

        self.bidirectional = model_config['bidirectional']
        self.num_direction = 2 if model_config['bidirectional'] else 1
        self.model_type = model_config['model_type']

        # Note: nn.LSTM's dropout only applies between stacked layers,
        # so it has no effect (and warns) when num_layers is 1.
        self.LSTM = nn.LSTM(input_size=model_config['emb_dim'],
                            hidden_size=model_config['hidden_dim'],
                            dropout=model_config['dropout'],
                            bidirectional=model_config['bidirectional'],
                            batch_first=model_config['batch_first'])

        self.fc = nn.Linear(model_config['hidden_dim'] * self.num_direction,
                            model_config['output_dim'])

        self.drop = nn.Dropout(model_config['dropout'])

    def forward(self, x):
        emb = self.emb(x)                        # (batch, seq_len, emb_dim) with batch_first=True
        output, (hidden, cell) = self.LSTM(emb)  # output: (batch, seq_len, hidden_dim * num_direction)
        last_output = output[:, -1, :]           # features of the last time step

        return self.fc(self.drop(last_output))
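
For example, would it be correct to insert an nn.BatchNorm1d layer between the LSTM output and the fully connected layer, like this? This is just a sketch of one idea, assuming batch_first=True; the BatchNormLSTM name and the placement of the norm are my own guess, not something I found in the docs:

import torch
import torch.nn as nn

class BatchNormLSTM(nn.Module):
    """Sketch: same model as above, with BatchNorm1d on the last LSTM output."""
    def __init__(self, vocab_size, emb_dim, hidden_dim, output_dim,
                 dropout=0.5, bidirectional=True):
        super().__init__()
        num_direction = 2 if bidirectional else 1
        self.emb = nn.Embedding(vocab_size, emb_dim)
        self.lstm = nn.LSTM(input_size=emb_dim,
                            hidden_size=hidden_dim,
                            bidirectional=bidirectional,
                            batch_first=True)
        # Normalize over the feature dimension of the last time step;
        # BatchNorm1d accepts (N, C) input, so (batch, features) works directly.
        self.bn = nn.BatchNorm1d(hidden_dim * num_direction)
        self.drop = nn.Dropout(dropout)
        self.fc = nn.Linear(hidden_dim * num_direction, output_dim)

    def forward(self, x):
        emb = self.emb(x)                  # (batch, seq_len, emb_dim)
        output, _ = self.lstm(emb)         # (batch, seq_len, hidden_dim * num_direction)
        last_output = output[:, -1, :]     # (batch, hidden_dim * num_direction)
        last_output = self.bn(last_output) # batch-normalize the features
        return self.fc(self.drop(last_output))

Since nn.BatchNorm1d also accepts (N, C, L) input, I suppose I could instead transpose output to (batch, features, seq_len) and normalize every time step rather than only the last one. Is either of these placements the recommended way to do it?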