PyTorch cannot trace my model for ONNX export (a lot of layers are ignored)

Very interesting issue. I have a model like this:

import torch
import torch.nn as nn

# build_yolov3_modules, ASFF, and YOLOv3Head come from the ASFF repo
class YOLOv3(nn.Module):

    def __init__(self, num_classes=80, ignore_thre=0.7, label_smooth=False, rfb=False, vis=False, asff=False):
        super(YOLOv3, self).__init__()
        self.module_list = build_yolov3_modules(
            num_classes, ignore_thre, label_smooth, rfb)

        self.level_0_fusion = ASFF(level=0, rfb=rfb, vis=vis)
        self.level_0_header = YOLOv3Head(anch_mask=[6, 7, 8], n_classes=num_classes, stride=32, in_ch=1024,
                                         ignore_thre=ignore_thre, label_smooth=label_smooth, rfb=rfb)
        self.level_1_fusion = ASFF(level=1, rfb=rfb, vis=vis)
        self.level_1_header = YOLOv3Head(anch_mask=[3, 4, 5], n_classes=num_classes, stride=16, in_ch=512,
                                         ignore_thre=ignore_thre, label_smooth=label_smooth, rfb=rfb)
        self.level_2_fusion = ASFF(level=2, rfb=rfb, vis=vis)
        self.level_2_header = YOLOv3Head(anch_mask=[0, 1, 2], n_classes=num_classes, stride=8, in_ch=256,
                                         ignore_thre=ignore_thre, label_smooth=label_smooth, rfb=rfb)

    def forward(self, x, targets=None, epoch=0):
        output = []
        route_layers = []

        for i, module in enumerate(self.module_list):
            x = module(x)
            if i in [6, 8, 17, 24, 32]:
                route_layers.append(x)
            if i == 19:
                x = torch.cat((x, route_layers[1]), 1)
            if i == 26:
                x = torch.cat((x, route_layers[0]), 1)
        print(len(route_layers))
        fused_0 = self.level_0_fusion(route_layers[2], route_layers[3], route_layers[4])
        x = self.level_0_header(fused_0)
        output.append(x)

        fused_1 = self.level_1_fusion(route_layers[2], route_layers[3], route_layers[4])
        x = self.level_1_header(fused_1)
        output.append(x)

        fused_2 = self.level_2_fusion(route_layers[2], route_layers[3], route_layers[4])
        x = self.level_2_header(fused_2)
        output.append(x)
        return torch.cat(output, 1)

When I export this model with torch.onnx.export, the generated ONNX model contains only an input and a Constant node. The tail of the export log looks like this:

      %level_2_header.Feature_adaption.dconv.weight : Float(256, 256, 3, 3),
      %level_2_header.Feature_adaption.dconv.bias : Float(256),
      %level_2_header.conv.weight : Float(340, 256, 1, 1),
      %level_2_header.conv.bias : Float(340)):
  %559 : Float(1, 52500, 85) = onnx::Constant[value=<Tensor>]()
  return (%559)
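For reference, this is roughly how I call the export (a minimal sketch; the input size, opset version, and file names are placeholders):

import torch

model = YOLOv3(num_classes=80)
model.eval()

dummy_input = torch.randn(1, 3, 608, 608)  # placeholder input resolution
torch.onnx.export(model, dummy_input, 'yolov3_asff.onnx',
                  verbose=True, opset_version=11,
                  input_names=['input'], output_names=['output'])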

The interesting thing is that if I comment out these lines:

    def forward(self, x, targets=None, epoch=0):
        output = []
        route_layers = []

        for i, module in enumerate(self.module_list):
            x = module(x)
            if i in [6, 8, 17, 24, 32]:
                route_layers.append(x)
            if i == 19:
                x = torch.cat((x, route_layers[1]), 1)
            if i == 26:
                x = torch.cat((x, route_layers[0]), 1)
        # print(len(route_layers))
        # fused_0 = self.level_0_fusion(route_layers[2], route_layers[3], route_layers[4])
        # x = self.level_0_header(fused_0)
        # output.append(x)

        # fused_1 = self.level_1_fusion(route_layers[2], route_layers[3], route_layers[4])
        # x = self.level_1_header(fused_1)
        # output.append(x)

        # fused_2 = self.level_2_fusion(route_layers[2], route_layers[3], route_layers[4])
        # x = self.level_2_header(fused_2)
        # output.append(x)
        # return torch.cat(output, 1)
        return x

then it traces and exports a normal ONNX model.
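As a side check (a minimal sketch; the input size is a placeholder), tracing the model directly and printing the TorchScript graph can show whether the layers are already being dropped at the tracing stage or only during the ONNX conversion:

import torch

model = YOLOv3(num_classes=80)
model.eval()
dummy_input = torch.randn(1, 3, 608, 608)  # placeholder input resolution

# If tracing itself folds everything into a constant, the printed graph
# will end in a single prim::Constant instead of the head computations.
traced = torch.jit.trace(model, dummy_input)
print(traced.graph)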

Can anybody help me debug this weird issue?
One bitcoin goes to whoever finds the root cause.

Can you post a full repro to a gist or something?

Actually, this model comes from this repo: https://github.com/ruinmessi/ASFF

I am adding ONNX export support for it.