Nested models, or combining an RNN with a linear model

Hi,

I am trying to build a model that can handle both string and float values.

My plan is to turn each string, character by character, into a one-hot encoded tensor that can then be fed to an RNN. Afterwards, that result would be merged with the other float values in a regular linear network.
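
As a rough sketch, the per-string encoding step would look something like this (assuming str_m.ALL_LETTERS is my character vocabulary, e.g. a string of all allowed characters):

import torch
import str_m  # my helper module defining ALL_LETTERS

def string_to_one_hot(text):
    # One row per character: (seq_length, len(ALL_LETTERS))
    tensor = torch.zeros(len(text), len(str_m.ALL_LETTERS))
    for i, ch in enumerate(text):
        tensor[i][str_m.ALL_LETTERS.index(ch)] = 1
    return tensor

# A single string as a batch of one: (batch_size, seq_length, len(ALL_LETTERS))
sample = string_to_one_hot("abc").unsqueeze(0)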

But I seem to be getting a bit confused about how to implement it correctly.
Looking forward to your advice.

This is what I have so far:

import torch
import torch.nn as nn
import torch.nn.functional as F

import str_m  # my helper module defining ALL_LETTERS (the character vocabulary)

device = torch.device("cuda" if torch.cuda.is_available() else "cpu")


class NeuralNet(nn.Module):
    def __init__(self, input_size_int, input_size_str, hidden_size_rnn, hidden_size_ln):
        super(NeuralNet, self).__init__()

        self.num_layers = 5
        self.hidden_size_rnn = hidden_size_rnn
        self.rnns = nn.ModuleList()
        self.fully_connected = nn.ModuleList()

        # One RNN, plus a linear head that reduces its output to a single
        # feature, per string input
        for i in range(input_size_str):
            self.rnns.append(nn.RNN(input_size=len(str_m.ALL_LETTERS), hidden_size=hidden_size_rnn,
                                    num_layers=self.num_layers, batch_first=True))
            self.fully_connected.append(nn.Linear(hidden_size_rnn, 1))

        self.input_size = input_size_int + input_size_str


        self.l1 = nn.Linear(input_size_int + input_size_str, hidden_size_ln)
        self.l2 = nn.Linear(hidden_size_ln, 2)  
    
    def forward(self, x1, s):
        # x1: (batch_size, input_size_int) float features
        # s:  list of one-hot string tensors, each of shape
        #     (batch_size, seq_length, len(str_m.ALL_LETTERS))
        x2 = []

        for idx, rnn in enumerate(self.rnns):
            # Initial hidden state for this string's RNN
            h0 = torch.zeros(self.num_layers, s[idx].size(0), self.hidden_size_rnn).to(device)

            out_rnn, _ = rnn(s[idx], h0)
            # out_rnn: (batch_size, seq_length, hidden_size)

            # Keep only the last time step of every sequence in the batch
            out_rnn = out_rnn[:, -1, :]

            # Reduce the RNN output to a single feature per string
            out_rnn = self.fully_connected[idx](out_rnn)  # (batch_size, 1)
            x2.append(out_rnn)

        # Merge x1 and the per-string features along the feature dimension
        x = torch.cat([x1] + x2, dim=1)

        out = F.relu(self.l1(x))
        out = F.relu(self.l2(out))
        
        return out
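
For context, this is roughly how I intend to call the model on top of the code above (the sizes below are just placeholders):

# Hypothetical sizes: 3 float features, 2 string features
model = NeuralNet(input_size_int=3, input_size_str=2,
                  hidden_size_rnn=16, hidden_size_ln=32).to(device)

x1 = torch.randn(4, 3).to(device)  # batch of 4 samples, 3 float features
# Two string features, each one-hot encoded to (batch_size, seq_length, vocab)
s = [torch.zeros(4, 10, len(str_m.ALL_LETTERS)).to(device) for _ in range(2)]

out = model(x1, s)  # -> shape (4, 2)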