How can I pass the parameters of LSTMCells to the optimizer?

I have a model that contains multiple LSTM cells.

import torch
import torch.nn as nn


class Seq(nn.Module):
    def __init__(self, num_hidden, num_cells, device=None):
        """
        Initialize the LSTM predictor.
        :param num_hidden: Number of hidden units per LSTM cell
        :param num_cells: Number of LSTM cells in the network, equivalent to the number of layers
        :param device: "gpu" to request CUDA; defaults to an automatic choice based on model size
        """
        super(Seq, self).__init__()
        self.num_cells = num_cells
        self.num_hidden = num_hidden
        # The LSTM cells are kept in a plain Python list
        self.cell_list = []
        # Pick the device: use CUDA only for larger models or when explicitly requested
        if device is None:
            if self.num_cells > 5 and self.num_hidden > 51:
                self.device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")
            else:
                self.device = "cpu"
        elif device == "gpu":
            self.device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")
        else:
            self.device = "cpu"
        # The first cell takes a scalar input; each later cell takes the previous cell's hidden state
        for i in range(num_cells):
            if i == 0:
                self.cell_list.append(nn.LSTMCell(1, num_hidden).double().to(self.device))
            else:
                self.cell_list.append(nn.LSTMCell(num_hidden, num_hidden).double().to(self.device))
        self.linear = nn.Linear(num_hidden, 1)

When I check the model's parameters, I only see the linear layer's weight and bias. Will I need to concatenate the parameters of each LSTMCell myself and then pass them to the optimizer?
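To show what I mean, here is a minimal sketch of how I check the parameters and how I imagine chaining them together by hand before handing them to the optimizer (the constructor arguments, the choice of Adam, and the learning rate are just placeholder values):

import itertools
import torch.optim as optim

model = Seq(num_hidden=32, num_cells=2)

# Only linear.weight and linear.bias are printed here
for name, param in model.named_parameters():
    print(name, param.shape)

# What I have in mind: collecting the LSTMCell parameters manually
all_params = itertools.chain(
    model.parameters(),
    *(cell.parameters() for cell in model.cell_list)
)
optimizer = optim.Adam(all_params, lr=0.01)

Is this manual chaining the right approach, or is there a way to make the model register the LSTMCell parameters itself?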