How to implement a stateful LSTM in PyTorch

import torch
import torch.nn as nn

device = torch.device("cuda" if torch.cuda.is_available() else "cpu")

class StatefulLSTM(nn.Module):
    def __init__(self, num_features, hidden_size=100, hidden_size_lstm=100,
                 num_layers_lstm=3, dropout_lstm=0, batch_size=128):
        super(StatefulLSTM, self).__init__()
        # Parameters
        self.num_features = num_features
        self.hidden_size = hidden_size
        self.hidden_size_lstm = hidden_size_lstm
        self.num_layers_lstm = num_layers_lstm
        self.batch_size = batch_size

        # Representation learning part
        self.lstm = nn.LSTM(num_features, hidden_size_lstm, num_layers_lstm,
                            batch_first=True, dropout=dropout_lstm)

        # Representation to hidden
        self.fc1 = nn.Linear(hidden_size_lstm, hidden_size)
        self.relu = nn.ReLU()

        # Hidden to output
        self.fc2 = nn.Linear(hidden_size, 1)
        self.sigmoid = nn.Sigmoid()

        # Initialize hidden and cell states
        self.hidden = self.init_hidden()

    def init_hidden(self):
        # Initialize hidden and cell states with zeros
        h0 = torch.zeros(self.num_layers_lstm, self.batch_size, self.hidden_size_lstm).to(device)
        c0 = torch.zeros(self.num_layers_lstm, self.batch_size, self.hidden_size_lstm).to(device)
        return (h0, c0)

    def forward(self, x):
        # Detach each tensor of the (h, c) tuple so gradients do not flow back
        # into earlier batches; the tuple itself has no .detach() method.
        hidden = (self.hidden[0].detach(), self.hidden[1].detach())

        # x is expected as (batch, num_features, seq_len); transpose to
        # (batch, seq_len, num_features) because the LSTM uses batch_first=True.
        representation, self.hidden = self.lstm(x.transpose(1, 2), hidden)

        # Hidden state of the last LSTM layer at the final time step
        hidden_state = self.hidden[0][-1]
        out = self.fc1(hidden_state)
        out = self.relu(out)

        out = self.fc2(out)
        out = self.sigmoid(out)

        return out

I am new to PyTorch and experimenting with stateful LSTMs. I want to check how the hidden state flows from one batch to the next. How can the code above be converted into a properly stateful model?
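
For context, here is a minimal sketch of the training loop I have in mind for checking this, assuming the class above. The sizes, the dummy data of shape (batch, num_features, seq_len), and the Adam/BCELoss choices are just placeholders, not part of my real setup:

import torch
import torch.nn as nn

device = torch.device("cuda" if torch.cuda.is_available() else "cpu")

# Hypothetical sizes, chosen only for illustration
num_features, seq_len, batch_size = 8, 20, 128

model = StatefulLSTM(num_features, batch_size=batch_size).to(device)
criterion = nn.BCELoss()
optimizer = torch.optim.Adam(model.parameters(), lr=1e-3)

# Dummy batches shaped (batch, num_features, seq_len), matching x.transpose(1, 2) in forward()
batches = [(torch.randn(batch_size, num_features, seq_len),
            torch.randint(0, 2, (batch_size, 1)).float()) for _ in range(5)]

model.hidden = model.init_hidden()   # reset the state once per epoch / per new sequence
for i, (x, y) in enumerate(batches):
    x, y = x.to(device), y.to(device)

    optimizer.zero_grad()
    out = model(x)                   # reuses the hidden state left over from the previous batch
    loss = criterion(out, y)
    loss.backward()
    optimizer.step()

    # Inspect how the state changes from one batch to the next
    print(f"batch {i}: hidden-state norm = {model.hidden[0].norm().item():.4f}")

Because the hidden state is allocated with a fixed batch_size, I assume every batch has to have exactly that size (e.g. by dropping the last incomplete batch), otherwise the carried-over state will not line up with the new input.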