I'm getting a size mismatch error:

RuntimeError: size mismatch, m1: [64 x 1024], m2: [2048 x 1024]
This is my code:
import torch
import torch.nn as nn
# resnet18 is imported/defined elsewhere in my project (not shown here)

class nnModel(nn.Module):

    def __init__(self, num_classes=32):
        super(nnModel, self).__init__()
        self.para1 = resnet18()
        self.para2 = resnet18()
        self.para3 = nn.Sequential(
            nn.Conv2d(1, 8, 5),
            nn.BatchNorm2d(8),
            nn.LeakyReLU(),
            nn.MaxPool2d(3, stride=2)
        )
        self.para3_fc = nn.Sequential(
            nn.Linear(95048, 64),
            nn.BatchNorm1d(64),
            nn.LeakyReLU()
        )
        self.NN1 = nn.Sequential(
            nn.Linear(2048, 1024),
            nn.BatchNorm1d(1024),
            nn.LeakyReLU()
        )
        self.fc1 = nn.Sequential(
            nn.Linear(1024 + 64 + 5, 1024),
            nn.BatchNorm1d(1024),
            nn.LeakyReLU()
        )
        self.fc2 = nn.Sequential(
            nn.Linear(1024, 512),
            nn.BatchNorm1d(512),
            nn.LeakyReLU()
        )
        self.fc3 = nn.Sequential(
            nn.Linear(1024 + 512, 248),
            nn.BatchNorm1d(248),
            nn.LeakyReLU(),
            nn.Linear(248, num_classes)
        )

    def forward(self, faces, par1, par2, orientation, height, width, cam_x, cam_y):
        x_l = self.para1(par1)
        x_r = self.para2(par2)
        x_e = torch.cat((x_l, x_r), 1)
        x_e = self.NN1(x_e)
        x_f = self.para3(faces)
        x_f = x_f.reshape(x_f.size(0), -1)
        x_f = self.para3_fc(x_f)
        x = torch.cat((x_e, x_f, orientation.unsqueeze(1), height.unsqueeze(1), width.unsqueeze(1),
                       cam_x.unsqueeze(1), cam_y.unsqueeze(1)), 1)
        x_fc1 = self.fc1(x)
        x_fc2 = self.fc2(x_fc1)
        x = self.fc3(torch.cat((x_e, x_fc2), 1))
        return x

The error message is actually very clear: just fix the dimensions of your Linear layers so they match what you feed them. Also, why hard-code the dims like that? (See the sketch right below for one way to avoid it.)
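One option for not hard-coding input sizes is nn.LazyLinear, which infers in_features from the first batch it sees. A minimal sketch, assuming a PyTorch version (1.8+) where LazyLinear is available:

# Inside nnModel.__init__, replacing the hard-coded versions above.
# LazyLinear only takes out_features; in_features is resolved
# automatically on the first forward pass.
self.NN1 = nn.Sequential(
    nn.LazyLinear(1024),
    nn.BatchNorm1d(1024),
    nn.LeakyReLU()
)
self.para3_fc = nn.Sequential(
    nn.LazyLinear(64),   # replaces the hand-computed 95048
    nn.BatchNorm1d(64),
    nn.LeakyReLU()
)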

(The error probably lies in this line: x_e = self.NN1(x_e).)
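A quick way to confirm that: compare the shape of the tensor going into NN1 with what NN1 expects. A sketch, assuming nnModel as defined above; the input shapes here are made up just for the check:

import torch

model = nnModel()
par1 = torch.randn(64, 3, 224, 224)   # hypothetical input shapes
par2 = torch.randn(64, 3, 224, 224)

with torch.no_grad():
    x_l = model.para1(par1)
    x_r = model.para2(par2)
    x_e = torch.cat((x_l, x_r), 1)

print(x_e.shape)                 # what NN1 actually receives, e.g. torch.Size([64, 1024])
print(model.NN1[0].in_features)  # what NN1 expects: 2048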

(Check the nn.Linear attributes in_features and out_features; the same idea works for the other commonly used layers as well.)
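For example, to print the dimensions of every Linear layer in the model (same assumption as above, model = nnModel()):

import torch.nn as nn

for name, module in model.named_modules():
    if isinstance(module, nn.Linear):
        print(name, module.in_features, '->', module.out_features)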

m1: [64 x 1024], m2: [2048 x 1024]

This says that the dimensions don't line up, so the matrix multiplication can't happen. The tip is to use the traceback to find the offending layer and fix the dims: the shapes should end up like [64 x 1024], [1024 x 1024] (change the layer's in_features to match its input) or [64 x 2048], [2048 x 1024] (change the input to match the layer).
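In your case the concatenated resnet features appear to be 1024-dim (that's what the error shows), so the simplest fix is probably to change the first Linear of NN1. A sketch against your code above, assuming each resnet18 backbone really does return 512 features per image:

self.NN1 = nn.Sequential(
    nn.Linear(1024, 1024),   # was nn.Linear(2048, 1024); cat of the two resnet outputs is 1024-dim
    nn.BatchNorm1d(1024),
    nn.LeakyReLU()
)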