TypeError: forward() takes 1 positional argument but 2 were given _

Hello, I have been trying to implement a small VGG network but run into this error. I have seen other posts with similar errors but none seem to match my case.

Here is the error message I am getting:

TypeError                                 Traceback (most recent call last)
/content/drive/My Drive/Colab Projects/uvadlc_practicals_2020/assignment_1/1_mlp_cnn/code/train_convnet_pytorch.py in <module>()
    206     FLAGS, unparsed = parser.parse_known_args()
    207 
--> 208     main()

6 frames
/content/drive/My Drive/Colab Projects/uvadlc_practicals_2020/assignment_1/1_mlp_cnn/code/train_convnet_pytorch.py in main()
    186 
    187     # Run the training operation
--> 188     train()
    189 
    190 

/content/drive/My Drive/Colab Projects/uvadlc_practicals_2020/assignment_1/1_mlp_cnn/code/train_convnet_pytorch.py in train()
    129         train_y = train_y.to(device)
    130 
--> 131         preds = model(train_x)
    132         labels = train_y.argmax(dim=-1)
    133 

/usr/local/lib/python3.7/dist-packages/torch/nn/modules/module.py in _call_impl(self, *input, **kwargs)
   1049         if not (self._backward_hooks or self._forward_hooks or self._forward_pre_hooks or _global_backward_hooks
   1050                 or _global_forward_hooks or _global_forward_pre_hooks):
-> 1051             return forward_call(*input, **kwargs)
   1052         # Do not call functions when jit is used
   1053         full_backward_hooks, non_full_backward_hooks = [], []

/content/drive/My Drive/Colab Projects/uvadlc_practicals_2020/assignment_1/1_mlp_cnn/code/convnet_pytorch.py in forward(self, x)
    151         #######################
    152 
--> 153         x = self.input_net(x)
    154         x = self.blocks(x)
    155         out = self.output_net(x)

/usr/local/lib/python3.7/dist-packages/torch/nn/modules/module.py in _call_impl(self, *input, **kwargs)
   1049         if not (self._backward_hooks or self._forward_hooks or self._forward_pre_hooks or _global_backward_hooks
   1050                 or _global_forward_hooks or _global_forward_pre_hooks):
-> 1051             return forward_call(*input, **kwargs)
   1052         # Do not call functions when jit is used
   1053         full_backward_hooks, non_full_backward_hooks = [], []

/usr/local/lib/python3.7/dist-packages/torch/nn/modules/container.py in forward(self, input)
    137     def forward(self, input):
    138         for module in self:
--> 139             input = module(input)
    140         return input
    141 

/usr/local/lib/python3.7/dist-packages/torch/nn/modules/module.py in _call_impl(self, *input, **kwargs)
   1049         if not (self._backward_hooks or self._forward_hooks or self._forward_pre_hooks or _global_backward_hooks
   1050                 or _global_forward_hooks or _global_forward_pre_hooks):
-> 1051             return forward_call(*input, **kwargs)
   1052         # Do not call functions when jit is used
   1053         full_backward_hooks, non_full_backward_hooks = [], []

TypeError: forward() takes 1 positional argument but 2 were given

Following is my actual code:

class PreActResnetBlock(nn.Module):
  def __init__(self, c_in, c_out, kernel=3, stride=1, padding=1):
    super().__init__()
    self.net = nn.Sequential(
                  nn.BatchNorm2d(c_in),
                  nn.ReLU(),
                  nn.Conv2d(c_in, c_out, kernel_size=kernel, padding=padding, stride=stride, bias=False)            
    )

  def forward(x):
    out = self.net(x)
    out += x

class VGGBlock(nn.Module):
  
  def __init__(self, c_in, c_out, last_block=False):
    super().__init__()
    layers = []
    if not last_block:
      layers.append(nn.Conv2d(c_in, c_out, kernel_size=1, padding=0, stride=1))
    
    layers.extend([nn.MaxPool2d(kernel_size=3, stride=2, padding=1),
                    PreActResnetBlock(c_out, c_out),
                    PreActResnetBlock(c_out, c_out)])

    self.net = nn.Sequential(*layers)

  def forward(x):
    x = self.net(x)
    return x


class ConvNet(nn.Module):
    def __init__(self, n_channels, n_classes):
        super().__init__()
        self.hparams = SimpleNamespace(n_channels = n_channels,
                                        n_classes = n_classes)        
        self._create_network()

    def _create_network(self):
      hidden_dims = [64, 128, 256, 512]

      # Stemm to scale up the channel size
      c_out = hidden_dims[0]
      self.input_net = nn.Sequential(
                          nn.Conv2d(self.hparams.n_channels, c_out, kernel_size=3, padding=1, bias=False),
                          PreActResnetBlock(64, 64)
      )

      #VGGBlocks
      blocks = []      
      for i in range(4):
        if i == 3:
          c_in = c_out = hidden_dims[-1]
          blocks.append(VGGBlock(c_in, c_out, last_block=True)) 
          break

        c_in = hidden_dims[i]
        c_out = hidden_dims[i+1]
        blocks.append(VGGBlock(c_in, c_out))

      self.blocks = nn.Sequential(*blocks)
      

      # Mapping for classification head to target
      self.output_net = nn.Sequential(
                            nn.MaxPool2d(kernel_size=3, stride=2, padding=1),
                            nn.Linear(c_out, self.hparams.n_classes)
      )

    
    def forward(self, x):
        x = self.input_net(x)
        x = self.blocks(x)
        out = self.output_net(x)

        return out

You missed the self argument on your forward method. It should be the first argument.

1 Like

Thanks, can’t believe I didn’t notice that.

1 Like