RuntimeError: Given groups=1, weight of size [32, 16, 3, 3], expected input[32, 32, 56, 56] to have 16 channels, but got 32 channels instead

import torch
import torch.nn as nn
import torch.nn.functional as F

batch_size = 32

class Net(nn.Module):

    def __init__(self):
        super(Net, self).__init__()
        self.conv1 = nn.Conv2d(3, 16, 3, padding=1)
        self.conv2 = nn.Conv2d(16, 32, 3, padding=1)
        #self.conv2_bn = nn.BatchNorm2d(32)
        self.conv3 = nn.Conv2d(32, 64, 3, padding=1)
        #self.conv3_bn = nn.BatchNorm2d(64)
        self.pool = nn.MaxPool2d(2, 2)
        self.fc1 = nn.Linear(64*28*28, 500)
        self.fc2 = nn.Linear(500, 2)
        self.dropout = nn.Dropout(0.3)

    def forward(self, x):
        #x = torch.randn(16, 3, 3, 3)
        x = self.pool(F.relu(self.conv1(x)))
        x = self.pool(F.relu(self.conv2(x)))
        x = self.pool(F.relu(self.conv2(x)))
        # flatten the image
        print(x.shape)
        x = x.view(-1, 64*28*28)
        x = self.dropout(x)
        x = F.relu(self.fc1(x))
        x = self.dropout(x)
        x = F.relu(self.fc2(x))
        return x

model = Net()
print(model)

if train_on_gpu:
    model.cuda()

RuntimeError                              Traceback (most recent call last)
<ipython-input> in <module>()
      9     data, target = data.cuda(), target.cuda()
     10     optimizer.zero_grad()
---> 11     output = model(data)
     12     loss = criterion(output, target)
     13     loss.backward()

4 frames
/usr/local/lib/python3.6/dist-packages/torch/nn/modules/conv.py in _conv_forward(self, input, weight)
    414                             _pair(0), self.dilation, self.groups)
    415         return F.conv2d(input, weight, self.bias, self.stride,
--> 416                         self.padding, self.dilation, self.groups)
    417
    418     def forward(self, input: Tensor) -> Tensor:

RuntimeError: Given groups=1, weight of size [32, 16, 3, 3], expected input[32, 32, 56, 56] to have 16 channels, but got 32 channels instead
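
The weight shape in the message reads as [out_channels, in_channels, kernel_h, kernel_w], so the failing layer expects a 16-channel input but received a 32-channel feature map. A minimal, standalone way to see what a conv layer expects:

import torch.nn as nn

conv2 = nn.Conv2d(16, 32, 3, padding=1)
print(conv2.weight.shape)   # torch.Size([32, 16, 3, 3]) -> expects 16 input channels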

Hi @Janhawi, have you included the batch dimension in the input before passing it to the model?
The input should be shaped (batch_size, channels, height, width).
Can you confirm this first?
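
You can check the layout with a dummy batch before involving the DataLoader. A minimal sketch; the 224x224 spatial size is an assumption based on the 64*28*28 flatten size and the 56x56 shape in the error:

import torch

# Expected layout: (batch_size, channels, height, width)
dummy = torch.randn(32, 3, 224, 224)
print(dummy.shape)        # torch.Size([32, 3, 224, 224])
# output = model(dummy)   # quick test without real data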

Here is your mistake: you copied the line but forgot to change the second conv2 to conv3 (a corrected sketch follows the snippet below).


def forward(self, x):
    #x = torch.randn(16, 3, 3, 3)
    x = self.pool(F.relu(self.conv1(x)))
    x = self.pool(F.relu(self.conv2(x)))
    x = self.pool(F.relu(self.conv2(x)))        # <--- HERE
    # flatten the image
    print(x.shape)
    x = x.view(-1, 64*28*28)
    x = self.dropout(x)
    x = F.relu(self.fc1(x))
    x = self.dropout(x)
    x = F.relu(self.fc2(x))
    return x
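
For reference, a corrected forward routes the third block through conv3 so the channel counts line up (3 -> 16 -> 32 -> 64). A minimal sketch, assuming 224x224 inputs so the flattened size 64*28*28 matches:

def forward(self, x):
    x = self.pool(F.relu(self.conv1(x)))   # 3  -> 16 channels, 224x224 -> 112x112
    x = self.pool(F.relu(self.conv2(x)))   # 16 -> 32 channels, 112x112 -> 56x56
    x = self.pool(F.relu(self.conv3(x)))   # 32 -> 64 channels, 56x56  -> 28x28
    x = x.view(-1, 64*28*28)               # flatten for the fully connected layers
    x = self.dropout(x)
    x = F.relu(self.fc1(x))
    x = self.dropout(x)
    x = F.relu(self.fc2(x))
    return x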

Yaa got it… Thank you so much
