torch.jit.Attribute doesn't keep the attribute

Hi,

I have an attribute I would like to keep in my class (see below).
To keep it after scripting my model (module = jit.script(module)), I wrapped it with torch.jit.Attribute. However, if I do that, torch can’t find this attribute while scripting the model (see error below).

Code

class LSTM(nn.Module):
    """LSTM wrapper module (excerpt from the question; `# [...]` marks elided code)."""

    def __init__(self, input_size, output_size, **kwargs):
        super(LSTM, self).__init__()

        logger.debug("Initializing LSTM")

        bidirectional = kwargs.get('bidirectional', False)
        # NOTE(review): the double leading underscore triggers Python name
        # mangling — the attribute is actually stored as
        # `_LSTM__num_internal_layers`, which is why torch.jit.script later
        # cannot resolve '__num_internal_layers' (see the error below).
        self.__num_internal_layers = torch.jit.Attribute(2 if bidirectional else 1, int)  # defined here

        # [...]

        self.l_lstm = nn.LSTM(
            input_size=input_size,
            hidden_size=self.lstm_hidden_size,
            num_layers=self.lstm_num_layers,
            batch_first=True,
            bidirectional=bidirectional,
            device=self.device
        )

         # [...]

    def forward(self, x: torch.Tensor) -> torch.Tensor:
        """
        Forward pass

        params:
            x:      tensor of shape (B, N, X) where B is the batch_size, N the sequence and X the data itself

        returns:
            y:      tensor of shape (B, N, Y) where Y is the output_size
        """

        # `.value` unwraps the torch.jit.Attribute in eager mode; this is the
        # access that fails under scripting due to the mangled attribute name.
        h_0 = torch.zeros(self.lstm_num_layers * self.__num_internal_layers.value, x.size(0), self.lstm_hidden_size, device=self.device)  # used here
        c_0 = torch.zeros(self.lstm_num_layers * self.__num_internal_layers.value, x.size(0), self.lstm_hidden_size, device=self.device)  # used here

        # [...]

Error

Module 'LSTM' has no attribute '__num_internal_layers' :
  File "/home/thytu/Prog/Blackfoot/herding-cats-AI/src/LSTM.py", line 77
        """
    
        h_0 = torch.zeros(self.lstm_num_layers * self.__num_internal_layers.value, x.size(0), self.lstm_hidden_size, device=self.device)
                                                 ~~~~~~~~~~~~~~~~~~~~~~~~~~ <--- HERE
        c_0 = torch.zeros(self.lstm_num_layers * self.__num_internal_layers.value, x.size(0), self.lstm_hidden_size, device=self.device)

Do you have any idea why?

Double leading underscores have a special meaning in Python and are used for private attributes.
They are not accessible via their plain attribute name, because Python's name mangling prepends `_ClassName` to them, as seen here:

class MyClass(object):
    """Minimal demo of name mangling for double-underscore attributes."""

    def __init__(self):
        self.my_attr = 1
        self.__my_private_attr = 2  # actually stored as `_MyClass__my_private_attr`
        
m = MyClass()
print(m.my_attr)
# > 1
print(m.__my_private_attr)  # plain name fails from outside the class
# > AttributeError: 'MyClass' object has no attribute '__my_private_attr'
print(m._MyClass__my_private_attr)  # the mangled name works
# > 2

I’m not familiar with your use case, but you could remove the double underscores and also the .value call:

class LSTM(nn.Module):
    """Minimal LSTM module that survives ``torch.jit.script``.

    Args:
        input_size:  number of features per time step of the input.
        output_size: kept for interface compatibility (unused in this excerpt).
        **kwargs:    optional ``bidirectional`` flag (default ``False``).
    """

    def __init__(self, input_size, output_size, **kwargs):
        super(LSTM, self).__init__()
        bidirectional = kwargs.get('bidirectional', False)
        # TorchScript infers `int` from a plain int assignment in __init__,
        # so the torch.jit.Attribute wrapper is unnecessary here. Dropping it
        # also keeps eager (non-scripted) execution working: the wrapper
        # object itself is a namedtuple, not an int, so `1 * wrapper` would
        # not do arithmetic.
        self.num_internal_layers = 2 if bidirectional else 1  # defined here

        self.l_lstm = nn.LSTM(
            input_size=input_size,
            hidden_size=1,
            num_layers=1,
            batch_first=True,
            bidirectional=bidirectional,
            device='cpu'
        )

    def forward(self, x: torch.Tensor) -> torch.Tensor:
        """Return the initial hidden state for `x`.

        params:
            x: tensor of shape (B, N, X)

        returns:
            zeros of shape (num_layers * num_directions, B, hidden_size)
        """
        h_0 = torch.zeros(1 * self.num_internal_layers, x.size(0), 1, device='cpu')  # used here
        c_0 = torch.zeros(1 * self.num_internal_layers, x.size(0), 1, device='cpu')  # used here
        return h_0

model = torch.jit.script(LSTM(1, 1))