Implementation of a class without ModuleList

The following are two implementations of the same network:

import torch
import torch.nn as nn

class ResidualNetwork(nn.Module):

    def __init__(self, input_dim, output_dim, hidden_layer_dims):
        """
        Inputs
        ----------
        input_dim: int
        output_dim: int
        hidden_layer_dims: list(int)
        """
        super(ResidualNetwork, self).__init__()

        dims = [input_dim] + hidden_layer_dims

        self.fcs = nn.ModuleList([nn.Linear(dims[i], dims[i+1])
                                  for i in range(len(dims)-1)])
        self.res_fcs = nn.ModuleList([nn.Linear(dims[i], dims[i+1], bias=False)
                                      if (dims[i] != dims[i+1])
                                      else nn.Identity()
                                      for i in range(len(dims)-1)])
        self.acts = nn.ModuleList([nn.ReLU() for _ in range(len(dims)-1)])
        self.fc_out = nn.Linear(dims[-1], output_dim)

    def forward(self, fea):
        for fc, res_fc, act in zip(self.fcs, self.res_fcs, self.acts):
            fea = act(fc(fea)) + res_fc(fea)
        return self.fc_out(fea)

out_hidden = [1024, 1024, 1024, 1024, 512, 512, 512, 256, 256, 256, 128, 128, 128, 64, 64, 32]

model = ResidualNetwork(271, 1, out_hidden)
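
As a quick sanity check of the list-based version (a minimal sketch; the batch size of 8 is an arbitrary choice):

x = torch.randn(8, 271)   # batch of 8 feature vectors with 271 features each
out = model(x)
print(out.shape)          # torch.Size([8, 1])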

and

class ResidualNetwork(nn.Module):
    """
    Feed-forward Residual Neural Network
    """

    def __init__(self, input_dim, output_dim):
        """
        Inputs
        ----------
        input_dim: int
        output_dim: int
        """
        super(ResidualNetwork, self).__init__()

        self.fcs1 = nn.Linear(input_dim, 1024)
        self.fcs2 = nn.Linear(1024, 1024)
        self.fcs3 = nn.Linear(1024, 1024)
        self.fcs4 = nn.Linear(1024, 1024)
        self.fcs5 = nn.Linear(1024, 512)
        self.fcs6 = nn.Linear(512, 512)
        self.fcs7 = nn.Linear(512, 512)
        self.fcs8 = nn.Linear(512, 256)
        self.fcs9 = nn.Linear(256, 256)
        self.fcs10 = nn.Linear(256, 256)
        self.fcs11 = nn.Linear(256, 128)
        self.fcs12 = nn.Linear(128, 128)
        self.fcs13 = nn.Linear(128, 128)
        self.fcs14 = nn.Linear(128, 64)
        self.fcs15 = nn.Linear(64, 64)
        self.fcs16 = nn.Linear(64, 32)

        self.fcs_out = nn.Linear(32, output_dim)

        self.act = nn.ReLU()

        # bias=False on the projection layers matches the ModuleList version above
        self.res_fcs1 = nn.Linear(input_dim, 1024, bias=False)
        self.res_fcs2 = nn.Identity()
        self.res_fcs3 = nn.Identity()
        self.res_fcs4 = nn.Identity()
        self.res_fcs5 = nn.Linear(1024, 512, bias=False)
        self.res_fcs6 = nn.Identity()
        self.res_fcs7 = nn.Identity()
        self.res_fcs8 = nn.Linear(512, 256, bias=False)
        self.res_fcs9 = nn.Identity()
        self.res_fcs10 = nn.Identity()
        self.res_fcs11 = nn.Linear(256, 128, bias=False)
        self.res_fcs12 = nn.Identity()
        self.res_fcs13 = nn.Identity()
        self.res_fcs14 = nn.Linear(128, 64, bias=False)
        self.res_fcs15 = nn.Identity()
        self.res_fcs16 = nn.Linear(64, 32, bias=False)

    def forward(self, fea):
        fea1 = self.act(self.fcs1(fea)) + self.res_fcs1(fea)        # 1024
        fea2 = self.act(self.fcs2(fea1)) + self.res_fcs2(fea1)      # 1024
        fea3 = self.act(self.fcs3(fea2)) + self.res_fcs3(fea2)      # 1024
        fea4 = self.act(self.fcs4(fea3)) + self.res_fcs4(fea3)      # 1024
        fea5 = self.act(self.fcs5(fea4)) + self.res_fcs5(fea4)      # 512
        fea6 = self.act(self.fcs6(fea5)) + self.res_fcs6(fea5)      # 512
        fea7 = self.act(self.fcs7(fea6)) + self.res_fcs7(fea6)      # 512
        fea8 = self.act(self.fcs8(fea7)) + self.res_fcs8(fea7)      # 256
        fea9 = self.act(self.fcs9(fea8)) + self.res_fcs9(fea8)      # 256
        fea10 = self.act(self.fcs10(fea9)) + self.res_fcs10(fea9)   # 256
        fea11 = self.act(self.fcs11(fea10)) + self.res_fcs11(fea10) # 128
        fea12 = self.act(self.fcs12(fea11)) + self.res_fcs12(fea11) # 128
        fea13 = self.act(self.fcs13(fea12)) + self.res_fcs13(fea12) # 128
        fea14 = self.act(self.fcs14(fea13)) + self.res_fcs14(fea13) # 64
        fea15 = self.act(self.fcs15(fea14)) + self.res_fcs15(fea14) # 64
        fea16 = self.act(self.fcs16(fea15)) + self.res_fcs16(fea15) # 32

        return self.fcs_out(fea16)                                  # 1

model = ResidualNetwork(271, 1)

Will these two classes produce the exact same neural network, or is something missing in the latter class? Also, do I need to create multiple ReLU modules, or will it work by creating only one?

Hi,

Both will work just fine. Usually people like to pack many modules in a list for convenience, but you don't have to.
For stateless modules like ReLU, yes, you can reuse a single instance!
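
To illustrate (a minimal sketch, not part of the original answer): nn.ReLU holds no parameters or buffers, so one instance can be applied at any number of points in a forward pass.

import torch
import torch.nn as nn

act = nn.ReLU()
print(list(act.parameters()))   # [] -- nothing learnable, so nothing is shared

x = torch.randn(4, 8)
# A reused instance and two fresh instances compute exactly the same thing.
print(torch.equal(act(act(x)), nn.ReLU()(nn.ReLU()(x))))   # True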

By "work just fine", do you mean that they will produce the same neural network?

Yes, they will create the same network, as long as the forward pass is doing the same computation.
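
One way to verify this yourself (a sketch; it assumes the two classes above are given distinct hypothetical names, ResidualNetworkList and ResidualNetworkFlat, so they can coexist in one script): give both models identical parameter values and compare their outputs.

import torch

model_a = ResidualNetworkList(271, 1, out_hidden)   # ModuleList version
model_b = ResidualNetworkFlat(271, 1)               # hand-written version

# Same total number of learnable parameters...
n_params = lambda m: sum(p.numel() for p in m.parameters())
assert n_params(model_a) == n_params(model_b)

# ...and identical outputs once every parameter holds the same value
# (the registration order differs, so we cannot simply zip the parameters).
with torch.no_grad():
    for p in model_a.parameters():
        p.fill_(1e-3)
    for p in model_b.parameters():
        p.fill_(1e-3)

x = torch.randn(4, 271)
print(torch.allclose(model_a(x), model_b(x)))   # True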