Is it necessary to construct the network model in an explicit way?

Hi, folks.

Today I implemented a simple neural network that has a bottom-up recognition process and a top-down generative process. What confuses me is this: if I define the model explicitly, like

import torch.nn as nn
import torch.nn.functional as F


class RECOGNITION(nn.Module):
    def __init__(self, input_size_rec, num_hidden_units_rec, num_class):
        super(RECOGNITION, self).__init__()
        self.rec_fc1 = nn.Linear(input_size_rec[0], num_hidden_units_rec[0])
        self.rec_fc2 = nn.Linear(input_size_rec[1], num_hidden_units_rec[1])
        self.rec_fc3 = nn.Linear(num_hidden_units_rec[-1], num_class)

    def forward(self, x):
        z_rec_all = []  # collect the hidden activations of the bottom-up pass

        z1_rec = F.relu(self.rec_fc1(x))
        z_rec_all.append(z1_rec)

        z2_rec = F.relu(self.rec_fc2(z1_rec))
        z_rec_all.append(z2_rec)

        y_rec = self.rec_fc3(z2_rec)

        return y_rec, z_rec_all


class GENERATION(nn.Module):
    def __init__(self, input_size_gen, num_hidden_units_gen, reconstruct_size):
        super(GENERATION, self).__init__()
        self.gen_fc1 = nn.Linear(input_size_gen[0], num_hidden_units_gen[0])
        self.gen_fc2 = nn.Linear(input_size_gen[1], num_hidden_units_gen[1])
        self.gen_fc3 = nn.Linear(num_hidden_units_gen[-1], reconstruct_size)

    def forward(self, y):
        z_gen_all = []  # collect the hidden activations of the top-down pass

        z2_gen = F.relu(self.gen_fc1(y))
        z_gen_all.append(z2_gen)

        z1_gen = F.relu(self.gen_fc2(z2_gen))
        z_gen_all.append(z1_gen)

        x_gen = F.relu(self.gen_fc3(z1_gen))

        return x_gen, z_gen_all

the model works and reaches a fairly high classification accuracy (nearly 92%).
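
For context, I construct and run the two modules roughly as follows (the concrete sizes below are only placeholders for illustration, not my real configuration):

import torch

# hypothetical sizes: a 784-dim input, two hidden layers, 10 classes
recognition = RECOGNITION(input_size_rec=[784, 512],
                          num_hidden_units_rec=[512, 256],
                          num_class=10)
generation = GENERATION(input_size_gen=[10, 256],
                        num_hidden_units_gen=[256, 512],
                        reconstruct_size=784)

x = torch.randn(32, 784)                # dummy batch of 32 inputs
y_rec, z_rec_all = recognition(x)       # bottom-up recognition pass
x_gen, z_gen_all = generation(y_rec)    # top-down generative pass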

However, if I define the same model in a (to my mind) more generic way, like

import torch.nn as nn
import torch.nn.functional as F


class RECOGNITION(nn.Module):
    def __init__(self, input_size_rec, num_hidden_units_rec, num_class):
        super(RECOGNITION, self).__init__()
        self.fcls = []
        for i, num_hunits in enumerate(num_hidden_units_rec):
            fcl = nn.Linear(input_size_rec[i], num_hunits)
            self.fcls.append(fcl)
        self.class_layer = nn.Linear(num_hidden_units_rec[-1], num_class)

    def forward(self, x):
        z_rec_all = []
        for i, fcl in enumerate(self.fcls):
            x = F.relu(fcl(x))
            z_rec_all.append(x)
        y_rec = self.class_layer(x)
        return y_rec, z_rec_all


class GENERATION(nn.Module):
    def __init__(self, input_size_gen, num_hidden_units_gen, reconstruct_size):
        super(GENERATION, self).__init__()
        self.gen_fcls = []
        for j, num_hunits in enumerate(num_hidden_units_gen):
            gen_fcl = nn.Linear(input_size_gen[j], num_hunits)
            self.gen_fcls.append(gen_fcl)
        self.reconstruct_layer = nn.Linear(num_hidden_units_gen[-1], reconstruct_size)

    def forward(self, y):
        z_gen_all = []
        for j, gen_fcl in enumerate(self.gen_fcls):
            y = F.relu(gen_fcl(y))
            z_gen_all.append(y)
        x_gen = F.relu(self.reconstruct_layer(y))
        return x_gen, z_gen_all

it still runs, but the classification accuracy drops to roughly 34%.

Does anybody know what might cause this? Thanks in advance.

When you add the Linear layers by appending them to a plain Python list (self.fcls), they are not registered with the module, so their parameters do not show up in model.parameters() and therefore never get trained. Use the following to ensure they are properly registered:

self.fcls = nn.ModuleList()
for i, num_hunits in enumerate(num_hidden_units_rec):
    fcl = nn.Linear(input_size_rec[i], num_hunits)
    self.fcls.append(fcl)
self.class_layer = nn.Linear(num_hidden_units_rec[-1], num_class)

Alternatively, you could register each layer manually with self.add_module("fcl" + str(i), fcl).
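
For example, the manual-registration variant would look roughly like this (same constructor arguments as above):

self.fcls = []
for i, num_hunits in enumerate(num_hidden_units_rec):
    fcl = nn.Linear(input_size_rec[i], num_hunits)
    # register the layer explicitly so the module tracks its parameters
    self.add_module("fcl" + str(i), fcl)
    self.fcls.append(fcl)
self.class_layer = nn.Linear(num_hidden_units_rec[-1], num_class)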

Either method will ensure that those layers get properly trained.
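
As a quick sanity check (a sketch with placeholder sizes), you can list the named parameters: with a plain Python list you will only see class_layer, whereas with nn.ModuleList or add_module the hidden layers appear as well.

model = RECOGNITION(input_size_rec=[784, 512],
                    num_hidden_units_rec=[512, 256],
                    num_class=10)

# plain list: only class_layer.weight / class_layer.bias are printed;
# nn.ModuleList (or add_module): the hidden layers' parameters are printed too
for name, p in model.named_parameters():
    print(name, tuple(p.shape))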

Hi, @jpeg729,

Thank you so much! You are right: once I replaced self.fcls = [] with self.fcls = nn.ModuleList(), the model produced a normal result again. :grinning: