Here is my model but it gives me the runtime error for size mismatch
self.conv1 = nn.Conv2d(1, 32, 5)
self.bn1 = nn.BatchNorm2d(32)
self.pool_L1 = nn.MaxPool2d(2,2)
self.conv2 = nn.Conv2d(32, 64, 3)
self.bn2 = nn.BatchNorm2d(64)
self.pool_L2 = nn.MaxPool2d(2,2)
self.conv3 = nn.Conv2d(64, 128, 3)
self.bn3 = nn.BatchNorm2d(128)
self.pool_L3 = nn.MaxPool2d(2,2)
self.conv4 = nn.Conv2d(128, 256, 3)
self.bn4 = nn.BatchNorm2d(256)
self.pool_L4 = nn.MaxPool2d(2,2)
self.conv5 = nn.Conv2d(256, 512, 3)
self.bn5 = nn.BatchNorm2d(512)
self.pool_L5 = nn.MaxPool2d(2,2)
self.dense1 = nn.Linear( 6*6*512 ,1024 )
self.dense2 = nn.Linear( 1024, 136)
## Note that among the layers to add, consider including:
# maxpooling layers, multiple conv layers, fully-connected layers, and other layers (such as dropout or batch normalization) to avoid overfitting
self.drop1 = nn.Dropout(0.20)
self.drop2 = nn.Dropout(0.25)
self.drop3 = nn.Dropout(0.30)
self.drop4 = nn.Dropout(0.35)
self.drop5 = nn.Dropout(0.40)
self.drop6 = nn.Dropout(0.25)
def forward(self, x):
## TODO: Define the feedforward behavior of this model
## x is the input image and, as an example, here you may choose to include a pool/conv step:
## x = self.pool(F.relu(self.conv1(x)))
x = self.bn1(F.relu(self.conv1(x)))
x = self.pool_L1(x)
x = self.drop1(x)
x = self.bn2(F.relu(self.conv2(x)))
x = self.pool_L2(x)
x = self.drop2(x)
x = self.bn3(F.relu(self.conv3(x)))
x = self.pool_L3(x)
x = self.drop3(x)
x = self.bn4(F.relu(self.conv4(x)))
x = self.pool_L4(x)
x = self.drop4(x)
x = self.bn5(F.relu(self.conv5(x)))
x = self.pool_L5(x)
x = self.drop5(x)
x = x.view(x.size(0),-1) # Used to redefine the shape of the x by rows:-x.size(0), just like numpy.
x = F.relu(self.dense1(x))
x = self.drop6(x)
x = F.relu(self.dense2(x))
RuntimeError: size mismatch, m1: [10 x 12800], m2: [18432 x 1024]
Here 10 in m1 I have take the batch size