How to skip layers without weights

    def __init__(self):
        super(DNet, self).__init__()

        self.conv2_1 = nn.Conv2d(1, 2, 3, 1, 1)
        self.relu_1 = nn.ReLU()
        self.maxpool_1 = nn.MaxPool1d(2, 2)
        self.conv2_2 = nn.Conv2d(2, 3, 1, 1, 4)
        self.relu_2 = nn.ReLU()
        self.maxpool_2 = nn.MaxPool1d(2, 2)
        self.conv2_3 = nn.Conv2d(2, 3, 1, 0, 8)
        self.fc = nn.Linear(8, 1)

        self._weight_init()

    def _weight_init(self):
        for module in self.children():
            print(module.weight)
            if (module != nn.ReLU()):
                # module.weight = nn.init.kaiming_uniform()
                nn.init.kaiming_uniform_(module.weight)
                module.bias.data.fill_(0.0)

While looping, I want to skip my ReLU layers. How do I do that? I also tried

    if module.weight is not None:

Oh, I misunderstood. Try something like this:

    def __init__(self):
        super(DNet, self).__init__()
        self.conv2_1 = nn.Conv2d(1, 2, 3, 1, 1)
        self.relu_1 = nn.ReLU()
        self.maxpool_1 = nn.MaxPool1d(2, 2)
        self.conv2_2 = nn.Conv2d(2, 3, 1, 1, 4)
        self.relu_2 = nn.ReLU()
        self.maxpool_2 = nn.MaxPool1d(2, 2)
        self.conv2_3 = nn.Conv2d(2, 3, 1, 0, 8)
        self.fc = nn.Linear(8, 1)

        self.apply(self._weight_init)

    def _weight_init(self, m):
        if not isinstance(m, nn.ReLU):
            ...  # your init code
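
Note that self.apply(fn) calls fn on every submodule registered in __init__ and also on the DNet instance itself, so _weight_init gets called for Conv2d, ReLU, MaxPool1d, Linear, and finally DNet. If you want to see exactly which modules are visited, a quick check like this works once the methods above are in your class (the lambda is only for illustration):

    DNet().apply(lambda m: print(type(m).__name__))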

FYI, since you have other layers without parameters (such as nn.MaxPool1d), this would be better:

    def _weight_init(self, m):
        if isinstance(m, (nn.Conv2d, nn.Linear)):
            ...  # your init code
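
Putting it together with the kaiming init from your question, a minimal sketch of the whole class might look like this (forward() is left out and the layer arguments are copied verbatim from your post, so it only demonstrates the initialization part):

    import torch.nn as nn

    class DNet(nn.Module):
        def __init__(self):
            super(DNet, self).__init__()
            self.conv2_1 = nn.Conv2d(1, 2, 3, 1, 1)
            self.relu_1 = nn.ReLU()
            self.maxpool_1 = nn.MaxPool1d(2, 2)
            self.conv2_2 = nn.Conv2d(2, 3, 1, 1, 4)
            self.relu_2 = nn.ReLU()
            self.maxpool_2 = nn.MaxPool1d(2, 2)
            self.conv2_3 = nn.Conv2d(2, 3, 1, 0, 8)
            self.fc = nn.Linear(8, 1)

            self.apply(self._weight_init)

        def _weight_init(self, m):
            # Only Conv2d and Linear carry weights/biases in this model, so
            # ReLU, MaxPool1d, and the DNet module itself are skipped.
            if isinstance(m, (nn.Conv2d, nn.Linear)):
                nn.init.kaiming_uniform_(m.weight)
                m.bias.data.fill_(0.0)

    net = DNet()          # _weight_init runs during construction
    print(net.fc.weight)  # already kaiming-initialized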