Verify my Keras CNN to PyTorch CNN conversion

from keras.models import Sequential
from keras.layers import Embedding, Dropout, Conv1D, MaxPooling1D, LSTM, Dense
from keras import regularizers

vocab_size = 4008
x_train_input_shape = (9611, 9200)
max_len = 7200

model = Sequential()
model.add(Embedding(input_dim=vocab_size,
                    output_dim=100,
                    input_length=max_len))

model.add(Dropout(0.5))
model.add(Conv1D(32, 5, kernel_regularizer=regularizers.l1(0.001), activation='relu'))
model.add(MaxPooling1D(pool_size=4))
model.add(LSTM(100))
model.add(Dense(1, kernel_regularizer=regularizers.l1(0.001), activation='sigmoid'))

print('Ready to compile')
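
(The compile step isn't shown above. For reference I compile with something like the following; binary cross-entropy and Adam are just placeholders on my side and not the point of the question. model.summary() prints the per-layer output shapes I'm trying to reproduce in PyTorch.)

model.compile(loss='binary_crossentropy', optimizer='adam', metrics=['accuracy'])
model.summary()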

I'm not looking for complete PyTorch code, but how would I add MaxPooling1D, Embedding, and LSTM layers in a PyTorch neural net? Also, how would I add L1 regularization to specific layers, the way kernel_regularizer does in Keras?
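
From what I've read, PyTorch layers have no per-layer regularizer argument, so the usual way to get the L1(0.001) penalty seems to be adding it to the loss manually inside the training loop. Something like the fragment below, where model.conv and model.out are the layers I want regularized and criterion is nn.BCELoss(); please correct me if this is wrong:

l1_lambda = 0.001
loss = criterion(preds, targets)
# add |W| of the conv kernel and the output layer weights,
# mirroring Keras' kernel_regularizer=regularizers.l1(0.001)
loss = loss + l1_lambda * (model.conv.weight.abs().sum() + model.out.weight.abs().sum())
loss.backward()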

This is what I have so far. I'm not confident the dimensions I'm feeding each layer match the Keras model, so I've commented the shape I expect at each step.

import torch
import torch.nn as nn

x_train_input_shape = (9611, 9200)
vocab_size = 4008
max_len = 7200


class NeuralNet(nn.Module):
    def __init__(self, vocab_size, emb_dim=100, hidden_dim=100, num_layers=1):
        # max_len isn't needed here: unlike Keras, the PyTorch layers infer the
        # sequence length at run time.
        super(NeuralNet, self).__init__()
        self.hidden_dim = hidden_dim
        self.num_layers = num_layers
        # (batch, seq_len) -> (batch, seq_len, emb_dim); the second argument is the
        # embedding dimension (Keras output_dim=100), not max_len
        self.emb = nn.Embedding(vocab_size, emb_dim)
        self.drop = nn.Dropout(0.5)
        # Conv1d in_channels must equal the embedding dimension; there is no
        # kernel_regularizer argument, the L1 penalty goes into the loss instead
        self.conv = nn.Conv1d(emb_dim, 32, kernel_size=5)
        self.pool = nn.MaxPool1d(4)  # pool_size=4, as in the Keras model
        self.lstm = nn.LSTM(32, hidden_dim, num_layers, batch_first=True)
        self.out = nn.Linear(hidden_dim, 1)
        self.rel = nn.ReLU()
        self.sig = nn.Sigmoid()

    def init_hidden(self, batch_size):
        # Optional: nn.LSTM defaults to zero initial states (like Keras) when no
        # hidden state is passed, and the batch size shouldn't be hard-coded.
        return (torch.zeros(self.num_layers, batch_size, self.hidden_dim),
                torch.zeros(self.num_layers, batch_size, self.hidden_dim))

    def forward(self, x):
        x = self.emb(x)                    # (batch, seq_len, emb_dim)
        x = self.drop(x)
        x = x.permute(0, 2, 1)             # Conv1d expects (batch, channels, seq_len)
        x = self.rel(self.conv(x))         # (batch, 32, seq_len - 4)
        x = self.pool(x)                   # (batch, 32, (seq_len - 4) // 4)
        x = x.permute(0, 2, 1)             # back to (batch, time, features) for the LSTM
        lstm_out, _ = self.lstm(x)         # (batch, time, hidden_dim)
        # last time step only, like Keras LSTM(100) without return_sequences
        output = self.sig(self.out(lstm_out[:, -1, :]))
        return output
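
To sanity-check the shapes I run a dummy batch through it (a batch of 8 is arbitrary); I'd expect the output to be (8, 1) to match the Keras Dense(1, activation='sigmoid'):

model = NeuralNet(vocab_size)
dummy = torch.randint(0, vocab_size, (8, max_len))  # 8 sequences of token indices
print(model(dummy).shape)  # torch.Size([8, 1])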