Can anyone explain the GRU in the following code?

```python
import torch
import torch.nn as nn

class RNNnet(nn.Module):
    def __init__(self, input_size, hidden_size, output_size, n_layers=1):
        super(RNNnet, self).__init__()
        self.input_size = input_size
        self.hidden_size = hidden_size
        self.output_size = output_size
        self.n_layers = n_layers
        # maps each token index to a dense vector of size hidden_size
        self.encoder = nn.Embedding(input_size, hidden_size)
        # the GRU consumes the embedded vector, so its input size is hidden_size
        self.gru = nn.GRU(hidden_size, hidden_size, n_layers)
        # projects the GRU output back to scores over the output vocabulary
        self.decoder = nn.Linear(hidden_size, output_size)

    def forward(self, input, hidden):
        # embed a single token index, then run one GRU step with the given hidden state
        input = self.encoder(input.reshape(1, -1))
        output, hidden = self.gru(input.view(1, 1, -1), hidden)
        output = self.decoder(output.view(1, -1))
        return output, hidden

    def init_hidden(self):
        # zero-initialized hidden state of shape (n_layers, batch=1, hidden_size)
        return torch.zeros(self.n_layers, 1, self.hidden_size)
```
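As far as I can tell, this is a character-level recurrent model: the Embedding turns a token index into a `hidden_size` vector, the GRU updates its hidden state one step at a time, and the Linear decoder maps the GRU output to scores over the vocabulary. Here is a minimal usage sketch, assuming it is driven one token per step; the vocabulary size, model dimensions, and sample indices below are made up for illustration:

```python
import torch

vocab_size = 100          # assumed number of distinct tokens (not from the post)
model = RNNnet(input_size=vocab_size, hidden_size=64, output_size=vocab_size)

hidden = model.init_hidden()            # (n_layers, 1, hidden_size) of zeros
sequence = torch.tensor([5, 17, 42])    # three made-up token indices

for ch in sequence:
    # output: (1, output_size) scores over the vocabulary for the next token
    # hidden: updated recurrent state, fed back in on the next step
    output, hidden = model(ch.view(1), hidden)

print(output.shape)   # torch.Size([1, 100])
print(hidden.shape)   # torch.Size([1, 1, 64])
```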

What is your question about this module? :slight_smile:
Note that you can find general information on `nn.GRU` in the docs.
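In short, with the default `batch_first=False`, `nn.GRU` expects an input of shape `(seq_len, batch, input_size)` and a hidden state of shape `(num_layers, batch, hidden_size)`, and returns the output for every time step plus the final hidden state. A small standalone sketch with arbitrary sizes, unrelated to the module above:

```python
import torch
import torch.nn as nn

gru = nn.GRU(input_size=10, hidden_size=20, num_layers=2)

x = torch.randn(5, 3, 10)    # (seq_len=5, batch=3, input_size=10)
h0 = torch.zeros(2, 3, 20)   # (num_layers=2, batch=3, hidden_size=20)

output, hn = gru(x, h0)
print(output.shape)  # torch.Size([5, 3, 20]) -> GRU output at every time step
print(hn.shape)      # torch.Size([2, 3, 20]) -> final hidden state per layer
```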