TypeError when passing a generator expression to nn.Sequential

When I try to use nn.Sequential() in the following way, I get a TypeError:

import torch.nn as nn

class Discrim(nn.Module):
    channels, maxpool_mask = [13, 64, 192, 384, 256, 256], [1, 1, 0, 0, 1]
    ker_size, strd, pad = [2, 5, 3, 3, 3], [2, 1, 1, 1, 1], [0, 2, 1, 1, 1]
    
    def __init__(self, classes=13, conv_layers=5):
        super(Discrim, self).__init__()
        self.classes = classes
        conv_features = []
        for index in range(conv_layers):
            conv_features.append(nn.Conv2d(Discrim.channels[index], Discrim.channels[index+1],
                                           kernel_size=Discrim.ker_size[index],
                                           stride=Discrim.strd[index],
                                           padding=Discrim.pad[index],
                                           bias=False))
            conv_features.append(nn.BatchNorm2d(Discrim.channels[index+1]))
            conv_features.append(nn.ReLU(inplace=True))
            if Discrim.maxpool_mask[index] == 1:
                conv_features.append(nn.MaxPool2d(3, stride=2, padding=1))
            else:
                conv_features.append(nn.ReLU(inplace=True))
        self.features = nn.Sequential(conv_features[i] for layer_num in range(4*conv_layers))

    def forward(self, x):
        out = self.features(x)
        return out


net = Discrim()

Then the output is:

File "test.py", line 184, in <module>
    net = Discrim()
  File "test.py", line 177, in __init__
    self.features = nn.Sequential(conv_features[i] for layer_num in range(4*conv_layers))
  File "/usr/local/lib/python3.5/dist-packages/torch/nn/modules/container.py", line 52, in __init__
    self.add_module(str(idx), module)
  File "/usr/local/lib/python3.5/dist-packages/torch/nn/modules/module.py", line 171, in add_module
    torch.typename(module)))
TypeError: Discrim.__init__.<locals>.<genexpr> is not a Module subclass

Did I make a mistake here?

The following is working for me:

import torch.nn as nn

class Discrim(nn.Module):
    channels, maxpool_mask = [13, 64, 192, 384, 256, 256], [1, 1, 0, 0, 1]
    ker_size, strd, pad = [2, 5, 3, 3, 3], [2, 1, 1, 1, 1], [0, 2, 1, 1, 1]


    def __init__(self, classes=13, conv_layers=5):
        super(Discrim, self).__init__()
        self.classes = classes
        conv_features = []
        for index in range(conv_layers):
            conv_features.append(nn.Conv2d(Discrim.channels[index], Discrim.channels[index + 1],
                                           kernel_size=Discrim.ker_size[index],
                                           stride=Discrim.strd[index],
                                           padding=Discrim.pad[index],
                                           bias=False))
            conv_features.append(nn.BatchNorm2d(Discrim.channels[index + 1]))
            conv_features.append(nn.ReLU(inplace=True))
            if Discrim.maxpool_mask[index] == 1:
                conv_features.append(nn.MaxPool2d(3, stride=2, padding=1))
            else:
                conv_features.append(nn.ReLU(inplace=True))
        self.features = nn.Sequential(*conv_features)
    
    
    def forward(self, x):
        out = self.features(x)
        return out


net = Discrim()

I only changed the line self.features = nn.Sequential(*conv_features). The problem is that you index with i, but i is never defined, and on top of that nn.Sequential receives the whole generator expression as a single argument instead of individual modules, which is why it complains that a <genexpr> is not a Module subclass. Since you iterate over all the layers anyway, I just unpacked the list, which does essentially the same thing.
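For reference, here is a minimal sketch (not from your code; the layer sizes and the 13-channel 64x64 input are made up) of the two calling conventions nn.Sequential accepts: individual modules passed as positional arguments, which is exactly what the * unpacking produces, or a single OrderedDict of named modules.

from collections import OrderedDict

import torch
import torch.nn as nn

# Three example layers; unpacking passes each one as its own positional argument.
layers = [
    nn.Conv2d(13, 64, kernel_size=2, stride=2, bias=False),
    nn.BatchNorm2d(64),
    nn.ReLU(inplace=True),
]
block = nn.Sequential(*layers)

# Equivalent, with explicit submodule names, via an OrderedDict.
named_block = nn.Sequential(OrderedDict([
    ('conv', nn.Conv2d(13, 64, kernel_size=2, stride=2, bias=False)),
    ('bn', nn.BatchNorm2d(64)),
    ('relu', nn.ReLU(inplace=True)),
]))

x = torch.randn(1, 13, 64, 64)   # assumed 13-channel 64x64 input
print(block(x).shape)            # torch.Size([1, 64, 32, 32])
print(named_block(x).shape)      # torch.Size([1, 64, 32, 32])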


Thank you so much, how stupid of me!