Model summary of UNet

When I print summary(model, (1, 512, 512)), far more ReLU layers are listed than the model actually contains. I am assuming something is wrong with my model.py code. Can someone please help me find what's wrong?
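For reference, this is roughly how I build the model and run the summary (a minimal sketch: the summary function comes from the torchsummary package, and the args dict below just stands in for what my script normally parses):

from torchsummary import summary

from model import UNet  # the model.py code below

args = {'input_channels': 1}  # one input channel, matching the (1, 512, 512) input size
model = UNet(args)
summary(model, (1, 512, 512), device="cpu")  # (channels, height, width)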

import torch
import torch.nn as nn


class VGGBlock(nn.Module):
    # two (conv -> batch norm -> activation) stages, VGG style
    def __init__(self, in_channels, middle_channels, out_channels, act_func=nn.ReLU(inplace=True)):
        super().__init__()
        self.act_func = act_func
        self.conv1 = nn.Conv2d(in_channels, middle_channels, 3, padding=1)
        self.bn1 = nn.BatchNorm2d(middle_channels)
        self.conv2 = nn.Conv2d(middle_channels, out_channels, 3, padding=1)
        self.bn2 = nn.BatchNorm2d(out_channels)

    def forward(self, x):
        out = self.conv1(x)
        out = self.bn1(out)
        out = self.act_func(out)

        out = self.conv2(out)
        out = self.bn2(out)
        out = self.act_func(out)

        return out


class UNet(nn.Module):
    def __init__(self, args):
        super().__init__()

        self.args = args

        nb_filter = [32, 64, 128, 256, 512]

        self.pool = nn.MaxPool2d(2, 2)
        self.up = nn.Upsample(scale_factor=2, mode='bilinear', align_corners=True)

        # encoder
        self.conv0_0 = VGGBlock(args['input_channels'], nb_filter[0], nb_filter[0])
        self.conv1_0 = VGGBlock(nb_filter[0], nb_filter[1], nb_filter[1])
        self.conv2_0 = VGGBlock(nb_filter[1], nb_filter[2], nb_filter[2])
        self.conv3_0 = VGGBlock(nb_filter[2], nb_filter[3], nb_filter[3])
        self.conv4_0 = VGGBlock(nb_filter[3], nb_filter[4], nb_filter[4])

        # decoder
        self.conv3_1 = VGGBlock(nb_filter[3]+nb_filter[4], nb_filter[3], nb_filter[3])
        self.conv2_2 = VGGBlock(nb_filter[2]+nb_filter[3], nb_filter[2], nb_filter[2])
        self.conv1_3 = VGGBlock(nb_filter[1]+nb_filter[2], nb_filter[1], nb_filter[1])
        self.conv0_4 = VGGBlock(nb_filter[0]+nb_filter[1], nb_filter[0], nb_filter[0])

        self.final = nn.Conv2d(nb_filter[0], 1, kernel_size=1)


    def forward(self, input):
        x0_0 = self.conv0_0(input)
        print(x0_0.shape)
        x1_0 = self.conv1_0(self.pool(x0_0))
        print(x1_0.shape)
        x2_0 = self.conv2_0(self.pool(x1_0))
        print(x2_0.shape)
        x3_0 = self.conv3_0(self.pool(x2_0))
        print(x3_0.shape)
        x4_0 = self.conv4_0(self.pool(x3_0))
        print(x4_0.shape)
        print(" ")
        x3_1 = self.conv3_1(torch.cat([x3_0, self.up(x4_0)], 1))
        print(x3_1.shape)
        x2_2 = self.conv2_2(torch.cat([x2_0, self.up(x3_1)], 1))
        print(x2_2.shape)
        x1_3 = self.conv1_3(torch.cat([x1_0, self.up(x2_2)], 1))
        print(x1_3.shape)
        x0_4 = self.conv0_4(torch.cat([x0_0, self.up(x1_3)], 1))
        print(x0_4.shape)

        output = self.final(x0_4)
        output = torch.sigmoid(output)  # F.sigmoid is deprecated

        print("THE OUTPUT IS:", output.shape)
        return output

Here is the output of summary(model, (1, 512, 512)):

----------------------------------------------------------------
        Layer (type)               Output Shape         Param #
================================================================
Conv2d-1 [-1, 32, 512, 512] 320
BatchNorm2d-2 [-1, 32, 512, 512] 64
ReLU-3 [-1, 32, 512, 512] 0
ReLU-4 [-1, 32, 512, 512] 0
ReLU-5 [-1, 32, 512, 512] 0
ReLU-6 [-1, 32, 512, 512] 0
ReLU-7 [-1, 32, 512, 512] 0
ReLU-8 [-1, 32, 512, 512] 0
ReLU-9 [-1, 32, 512, 512] 0
ReLU-10 [-1, 32, 512, 512] 0
ReLU-11 [-1, 32, 512, 512] 0
Conv2d-12 [-1, 32, 512, 512] 9,248
BatchNorm2d-13 [-1, 32, 512, 512] 64
ReLU-14 [-1, 32, 512, 512] 0
ReLU-15 [-1, 32, 512, 512] 0
ReLU-16 [-1, 32, 512, 512] 0
ReLU-17 [-1, 32, 512, 512] 0
ReLU-18 [-1, 32, 512, 512] 0
ReLU-19 [-1, 32, 512, 512] 0
ReLU-20 [-1, 32, 512, 512] 0
ReLU-21 [-1, 32, 512, 512] 0
ReLU-22 [-1, 32, 512, 512] 0
VGGBlock-23 [-1, 32, 512, 512] 0
MaxPool2d-24 [-1, 32, 256, 256] 0
Conv2d-25 [-1, 64, 256, 256] 18,496
BatchNorm2d-26 [-1, 64, 256, 256] 128
ReLU-27 [-1, 64, 256, 256] 0
ReLU-28 [-1, 64, 256, 256] 0
ReLU-29 [-1, 64, 256, 256] 0
ReLU-30 [-1, 64, 256, 256] 0
ReLU-31 [-1, 64, 256, 256] 0
ReLU-32 [-1, 64, 256, 256] 0
ReLU-33 [-1, 64, 256, 256] 0
ReLU-34 [-1, 64, 256, 256] 0
ReLU-35 [-1, 64, 256, 256] 0
Conv2d-36 [-1, 64, 256, 256] 36,928
BatchNorm2d-37 [-1, 64, 256, 256] 128
ReLU-38 [-1, 64, 256, 256] 0
ReLU-39 [-1, 64, 256, 256] 0
ReLU-40 [-1, 64, 256, 256] 0
ReLU-41 [-1, 64, 256, 256] 0
ReLU-42 [-1, 64, 256, 256] 0
ReLU-43 [-1, 64, 256, 256] 0
ReLU-44 [-1, 64, 256, 256] 0
ReLU-45 [-1, 64, 256, 256] 0
ReLU-46 [-1, 64, 256, 256] 0
VGGBlock-47 [-1, 64, 256, 256] 0
MaxPool2d-48 [-1, 64, 128, 128] 0
Conv2d-49 [-1, 128, 128, 128] 73,856
BatchNorm2d-50 [-1, 128, 128, 128] 256
ReLU-51 [-1, 128, 128, 128] 0
ReLU-52 [-1, 128, 128, 128] 0
ReLU-53 [-1, 128, 128, 128] 0
ReLU-54 [-1, 128, 128, 128] 0
ReLU-55 [-1, 128, 128, 128] 0
ReLU-56 [-1, 128, 128, 128] 0
ReLU-57 [-1, 128, 128, 128] 0
ReLU-58 [-1, 128, 128, 128] 0
ReLU-59 [-1, 128, 128, 128] 0
Conv2d-60 [-1, 128, 128, 128] 147,584
BatchNorm2d-61 [-1, 128, 128, 128] 256
ReLU-62 [-1, 128, 128, 128] 0
ReLU-63 [-1, 128, 128, 128] 0
ReLU-64 [-1, 128, 128, 128] 0
ReLU-65 [-1, 128, 128, 128] 0
ReLU-66 [-1, 128, 128, 128] 0
ReLU-67 [-1, 128, 128, 128] 0
ReLU-68 [-1, 128, 128, 128] 0
ReLU-69 [-1, 128, 128, 128] 0
ReLU-70 [-1, 128, 128, 128] 0
VGGBlock-71 [-1, 128, 128, 128] 0
MaxPool2d-72 [-1, 128, 64, 64] 0
Conv2d-73 [-1, 256, 64, 64] 295,168
BatchNorm2d-74 [-1, 256, 64, 64] 512
ReLU-75 [-1, 256, 64, 64] 0
ReLU-76 [-1, 256, 64, 64] 0
ReLU-77 [-1, 256, 64, 64] 0
ReLU-78 [-1, 256, 64, 64] 0
ReLU-79 [-1, 256, 64, 64] 0
ReLU-80 [-1, 256, 64, 64] 0
ReLU-81 [-1, 256, 64, 64] 0
ReLU-82 [-1, 256, 64, 64] 0
ReLU-83 [-1, 256, 64, 64] 0
Conv2d-84 [-1, 256, 64, 64] 590,080
BatchNorm2d-85 [-1, 256, 64, 64] 512
ReLU-86 [-1, 256, 64, 64] 0
ReLU-87 [-1, 256, 64, 64] 0
ReLU-88 [-1, 256, 64, 64] 0
ReLU-89 [-1, 256, 64, 64] 0
ReLU-90 [-1, 256, 64, 64] 0
ReLU-91 [-1, 256, 64, 64] 0
ReLU-92 [-1, 256, 64, 64] 0
ReLU-93 [-1, 256, 64, 64] 0
ReLU-94 [-1, 256, 64, 64] 0
VGGBlock-95 [-1, 256, 64, 64] 0
MaxPool2d-96 [-1, 256, 32, 32] 0
Conv2d-97 [-1, 512, 32, 32] 1,180,160
BatchNorm2d-98 [-1, 512, 32, 32] 1,024
ReLU-99 [-1, 512, 32, 32] 0
ReLU-100 [-1, 512, 32, 32] 0
ReLU-101 [-1, 512, 32, 32] 0
ReLU-102 [-1, 512, 32, 32] 0
ReLU-103 [-1, 512, 32, 32] 0
ReLU-104 [-1, 512, 32, 32] 0
ReLU-105 [-1, 512, 32, 32] 0
ReLU-106 [-1, 512, 32, 32] 0
ReLU-107 [-1, 512, 32, 32] 0
Conv2d-108 [-1, 512, 32, 32] 2,359,808
BatchNorm2d-109 [-1, 512, 32, 32] 1,024
ReLU-110 [-1, 512, 32, 32] 0
ReLU-111 [-1, 512, 32, 32] 0
ReLU-112 [-1, 512, 32, 32] 0
ReLU-113 [-1, 512, 32, 32] 0
ReLU-114 [-1, 512, 32, 32] 0
ReLU-115 [-1, 512, 32, 32] 0
ReLU-116 [-1, 512, 32, 32] 0
ReLU-117 [-1, 512, 32, 32] 0
ReLU-118 [-1, 512, 32, 32] 0
VGGBlock-119 [-1, 512, 32, 32] 0
Upsample-120 [-1, 512, 64, 64] 0
Conv2d-121 [-1, 256, 64, 64] 1,769,728
BatchNorm2d-122 [-1, 256, 64, 64] 512
ReLU-123 [-1, 256, 64, 64] 0
ReLU-124 [-1, 256, 64, 64] 0
ReLU-125 [-1, 256, 64, 64] 0
ReLU-126 [-1, 256, 64, 64] 0
ReLU-127 [-1, 256, 64, 64] 0
ReLU-128 [-1, 256, 64, 64] 0
ReLU-129 [-1, 256, 64, 64] 0
ReLU-130 [-1, 256, 64, 64] 0
ReLU-131 [-1, 256, 64, 64] 0
Conv2d-132 [-1, 256, 64, 64] 590,080
BatchNorm2d-133 [-1, 256, 64, 64] 512
ReLU-134 [-1, 256, 64, 64] 0
ReLU-135 [-1, 256, 64, 64] 0
ReLU-136 [-1, 256, 64, 64] 0
ReLU-137 [-1, 256, 64, 64] 0
ReLU-138 [-1, 256, 64, 64] 0
ReLU-139 [-1, 256, 64, 64] 0
ReLU-140 [-1, 256, 64, 64] 0
ReLU-141 [-1, 256, 64, 64] 0
ReLU-142 [-1, 256, 64, 64] 0
VGGBlock-143 [-1, 256, 64, 64] 0
Upsample-144 [-1, 256, 128, 128] 0
Conv2d-145 [-1, 128, 128, 128] 442,496
BatchNorm2d-146 [-1, 128, 128, 128] 256
ReLU-147 [-1, 128, 128, 128] 0
ReLU-148 [-1, 128, 128, 128] 0
ReLU-149 [-1, 128, 128, 128] 0
ReLU-150 [-1, 128, 128, 128] 0
ReLU-151 [-1, 128, 128, 128] 0
ReLU-152 [-1, 128, 128, 128] 0
ReLU-153 [-1, 128, 128, 128] 0
ReLU-154 [-1, 128, 128, 128] 0
ReLU-155 [-1, 128, 128, 128] 0
Conv2d-156 [-1, 128, 128, 128] 147,584
BatchNorm2d-157 [-1, 128, 128, 128] 256
ReLU-158 [-1, 128, 128, 128] 0
ReLU-159 [-1, 128, 128, 128] 0
ReLU-160 [-1, 128, 128, 128] 0
ReLU-161 [-1, 128, 128, 128] 0
ReLU-162 [-1, 128, 128, 128] 0
ReLU-163 [-1, 128, 128, 128] 0
ReLU-164 [-1, 128, 128, 128] 0
ReLU-165 [-1, 128, 128, 128] 0
ReLU-166 [-1, 128, 128, 128] 0
VGGBlock-167 [-1, 128, 128, 128] 0
Upsample-168 [-1, 128, 256, 256] 0
Conv2d-169 [-1, 64, 256, 256] 110,656
BatchNorm2d-170 [-1, 64, 256, 256] 128
ReLU-171 [-1, 64, 256, 256] 0
ReLU-172 [-1, 64, 256, 256] 0
ReLU-173 [-1, 64, 256, 256] 0
ReLU-174 [-1, 64, 256, 256] 0
ReLU-175 [-1, 64, 256, 256] 0
ReLU-176 [-1, 64, 256, 256] 0
ReLU-177 [-1, 64, 256, 256] 0
ReLU-178 [-1, 64, 256, 256] 0
ReLU-179 [-1, 64, 256, 256] 0
Conv2d-180 [-1, 64, 256, 256] 36,928
BatchNorm2d-181 [-1, 64, 256, 256] 128
ReLU-182 [-1, 64, 256, 256] 0
ReLU-183 [-1, 64, 256, 256] 0
ReLU-184 [-1, 64, 256, 256] 0
ReLU-185 [-1, 64, 256, 256] 0
ReLU-186 [-1, 64, 256, 256] 0
ReLU-187 [-1, 64, 256, 256] 0
ReLU-188 [-1, 64, 256, 256] 0
ReLU-189 [-1, 64, 256, 256] 0
ReLU-190 [-1, 64, 256, 256] 0
VGGBlock-191 [-1, 64, 256, 256] 0
Upsample-192 [-1, 64, 512, 512] 0
Conv2d-193 [-1, 32, 512, 512] 27,680
BatchNorm2d-194 [-1, 32, 512, 512] 64
ReLU-195 [-1, 32, 512, 512] 0
ReLU-196 [-1, 32, 512, 512] 0
ReLU-197 [-1, 32, 512, 512] 0
ReLU-198 [-1, 32, 512, 512] 0
ReLU-199 [-1, 32, 512, 512] 0
ReLU-200 [-1, 32, 512, 512] 0
ReLU-201 [-1, 32, 512, 512] 0
ReLU-202 [-1, 32, 512, 512] 0
ReLU-203 [-1, 32, 512, 512] 0
Conv2d-204 [-1, 32, 512, 512] 9,248
BatchNorm2d-205 [-1, 32, 512, 512] 64
ReLU-206 [-1, 32, 512, 512] 0
ReLU-207 [-1, 32, 512, 512] 0
ReLU-208 [-1, 32, 512, 512] 0
ReLU-209 [-1, 32, 512, 512] 0
ReLU-210 [-1, 32, 512, 512] 0
ReLU-211 [-1, 32, 512, 512] 0
ReLU-212 [-1, 32, 512, 512] 0
ReLU-213 [-1, 32, 512, 512] 0
ReLU-214 [-1, 32, 512, 512] 0
VGGBlock-215 [-1, 32, 512, 512] 0
Conv2d-216 [-1, 1, 512, 512] 33

Your code looks alright.
Could this be a bug in the summary library you are using? Would you mind opening an issue on their GitHub to check whether that's the case?
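Before opening an issue, here is a quick check to narrow it down (a sketch, assuming the UNet and VGGBlock from your model.py and input_channels = 1): because act_func=nn.ReLU(inplace=True) is a default argument, it is evaluated once, so every VGGBlock holds the same ReLU object. PyTorch de-duplicates shared submodules when traversing the model, so comparing the number of distinct ReLU modules with the number of ReLU rows in the table shows whether the duplication lives in the model graph or in how the summary lib registers its hooks:

import torch.nn as nn

from model import UNet, VGGBlock  # the code from the question

model = UNet({'input_channels': 1})

# model.modules() yields each module object only once, even when it is
# registered under several parents, so this counts distinct ReLUs.
distinct_relus = sum(isinstance(m, nn.ReLU) for m in model.modules())
print("distinct ReLU modules:", distinct_relus)

# Do all VGGBlocks really hold the same ReLU object?
blocks = [m for m in model.modules() if isinstance(m, VGGBlock)]
print("one shared ReLU instance:",
      all(b.act_func is blocks[0].act_func for b in blocks))

If this reports a single distinct ReLU while the table above lists nine ReLU rows per activation, the extra rows come from that one shared module being hooked once per block, which would be useful detail for the GitHub issue.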