I don't know why the first layer appears twice in the summary.

import torch.nn.functional as F
import torch.nn as nn
from modeling.sync_batchnorm.batchnorm import SynchronizedBatchNorm2d
from torchsummary import summary

def conv_bn(inp, oup, stride, BatchNorm):
    return nn.Sequential(
        nn.Conv2d(inp, oup, 3, stride, 1, bias=False),
        BatchNorm(oup),
        nn.ReLU6(inplace=True)
    )

class test(nn.Module):
    def __init__(self):
        super(test, self).__init__()

        input_channel = 32
        self.features = [conv_bn(3, input_channel, 2, SynchronizedBatchNorm2d)]
        self.features.append(conv_bn(input_channel, input_channel, 1, SynchronizedBatchNorm2d))
        self.features.append(conv_bn(input_channel, input_channel, 1, SynchronizedBatchNorm2d))
        self.features = nn.Sequential(*self.features)
        self.low_features = self.features[0] # this one is weird
        self.a = nn.Conv2d(input_channel, input_channel, 1, 1, 1)  # kernel_size=1 with padding=1 grows the output to 482x482
        self.b = self.a  # second reference to the same module
        # print(self.features)
        # print(self.low_features)
    def forward(self, x):
        x = self.features(x)
        x = self.a(x)
        
        return x
b = test().cuda()

summary(b,(3,960,960))
----------------------------------------------------------------
        Layer (type)               Output Shape         Param #
================================================================
            Conv2d-1         [-1, 32, 480, 480]             864
            Conv2d-2         [-1, 32, 480, 480]             864
SynchronizedBatchNorm2d-3         [-1, 32, 480, 480]              64
SynchronizedBatchNorm2d-4         [-1, 32, 480, 480]              64
             ReLU6-5         [-1, 32, 480, 480]               0
             ReLU6-6         [-1, 32, 480, 480]               0
            Conv2d-7         [-1, 32, 480, 480]           9,216
SynchronizedBatchNorm2d-8         [-1, 32, 480, 480]              64
             ReLU6-9         [-1, 32, 480, 480]               0
           Conv2d-10         [-1, 32, 480, 480]           9,216
SynchronizedBatchNorm2d-11         [-1, 32, 480, 480]              64
            ReLU6-12         [-1, 32, 480, 480]               0
           Conv2d-13         [-1, 32, 482, 482]           1,056
================================================================
Total params: 21,472
Trainable params: 21,472
Non-trainable params: 0
----------------------------------------------------------------
Input size (MB): 10.55
Forward/backward pass size (MB): 731.72
Params size (MB): 0.08
Estimated Total Size (MB): 742.35
----------------------------------------------------------------

I only use one nn.Sequential, but the summary output shows the first conv_bn block twice when I also store it as "self.low_features".

Does this affect the model?

Is the layer actually computed twice?

python==3.9.16
torch==1.13.1+cu116
torchsummary==1.5.1

The summary output might not be aware that the layer is reused and might thus print it again. You could double-check whether the expected number of parameters is returned from model.named_parameters().
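For example, a minimal sanity check (assuming the test module defined above) could sum the parameters returned by named_parameters(); since self.low_features and self.b are just extra references to modules that are already registered, their parameters are only yielded once:

# Sketch: sum the parameters actually registered on the model.
# Aliased modules (self.low_features, self.b) share the same parameter
# objects, so named_parameters() only returns them once.
model = test()

total = 0
for name, param in model.named_parameters():
    print(name, tuple(param.shape), param.numel())
    total += param.numel()

print("Total params:", total)

If the first conv_bn block were really duplicated, its 928 parameters (864 for the conv plus 64 for the batch norm) would be counted twice. Here the sum should come out to 20,544, i.e. 928 less than the 21,472 reported by torchsummary, so only the summary printout is doubled, not the model itself.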


Thank you!

Now I understand.