Shape handling error between two networks (resolved)

Update: the error described below is now resolved.

N1 is:

```
import torch.nn as nn
import torch.nn.functional as F

class ResNet(nn.Module):
    def __init__(self, block, num_blocks):
        super(ResNet, self).__init__()
        self.in_planes = 64

        self.conv1 = nn.Conv2d(3, 64, kernel_size=3, stride=1, padding=1, bias=False)
        self.bn1 = nn.BatchNorm2d(64)
        self.layer1 = self._make_layer(block, 64, num_blocks[0], stride=1)
        self.layer2 = self._make_layer(block, 128, num_blocks[1], stride=2)
        self.layer3 = self._make_layer(block, 256, num_blocks[2], stride=2)
        self.layer4 = self._make_layer(block, 512, num_blocks[3], stride=2)
        self.linear = nn.Linear(512*block.expansion, 1)

    def _make_layer(self, block, planes, num_blocks, stride):
        strides = [stride] + [1]*(num_blocks-1)
        layers = []
        for stride in strides:
            layers.append(block(self.in_planes, planes, stride))
            self.in_planes = planes * block.expansion
        return nn.Sequential(*layers)

    def forward(self, x):
        out = F.relu(self.bn1(self.conv1(x)))
        out = self.layer1(out)
        out = self.layer2(out)
        out = self.layer3(out)
        out = self.layer4(out)
        out = F.avg_pool2d(out, 4)
        out = out.view(out.size(0), -1)
        out = self.linear(out)

        #print("Output of N1:", out.shape)
        return out
```
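
The width of N1's output is fixed by its last layer. A minimal sketch of just that last step (not the full network, and assuming a BasicBlock-style `block` with `expansion == 1`) shows that each sample ends up with a single output value:

```
import torch
import torch.nn as nn

# Sketch of N1's final step, assuming block.expansion == 1:
# the 512-dim pooled features pass through nn.Linear(512, 1),
# so every sample in the batch is reduced to one value.
linear = nn.Linear(512, 1)
features = torch.randn(128, 512)   # hypothetical batch of pooled features
out = linear(features)
print(out.shape)                   # torch.Size([128, 1])
```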

N2 is:

```
class ResNetFullyConnected(nn.Module):
    def __init__(self, block, num_blocks, num_classes=10):
        super(ResNetFullyConnected, self).__init__()

        self.in_planes = 64
        num_neurons = 64  # replace with the number of neurons you need
        self.flatten = nn.Flatten()  # add a Flatten layer
        self.fc1 = nn.Linear(32*32*3, num_neurons)
        self.bn1 = nn.BatchNorm1d(num_neurons)
        self.layer1 = self._make_layer(block, num_neurons, num_neurons, num_blocks[0])
        self.layer2 = self._make_layer(block, num_neurons, num_neurons, num_blocks[1])
        self.layer3 = self._make_layer(block, num_neurons, num_neurons, num_blocks[2])
        self.layer4 = self._make_layer(block, num_neurons, num_neurons, num_blocks[3])
        self.fc2 = nn.Linear(num_neurons, num_classes)

    def _make_layer(self, block, in_features, out_features, num_blocks):
        layers = []
        for _ in range(num_blocks):
            layers.append(block(in_features, out_features))
        return nn.Sequential(*layers)

    def forward(self, x):
        out = F.relu(self.bn1(self.fc1(self.flatten(x))))
        out = self.layer1(out)
        out = self.layer2(out)
        out = self.layer3(out)
        out = self.layer4(out)
        out = self.fc2(out)
        return out
```
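
For context, N2's first layer fixes the input size it can accept. A small sketch (assuming 3x32x32 images, e.g. CIFAR-10, and `num_neurons = 64`) of what `fc1` expects after flattening:

```
import torch
import torch.nn as nn

# Sketch of N2's input stage: nn.Flatten() followed by nn.Linear(32*32*3, 64)
# means each sample must flatten to exactly 3072 features before fc1.
flatten = nn.Flatten()
fc1 = nn.Linear(32 * 32 * 3, 64)   # 3072 -> 64
x = torch.randn(128, 3, 32, 32)    # hypothetical batch of images
out = fc1(flatten(x))
print(out.shape)                   # torch.Size([128, 64])
```
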
The code of the Adapter is:

```
class Adapter(nn.Module):
    def __init__(self, in_features, out_features):
        super(Adapter, self).__init__()
        self.linear = nn.Linear(in_features, out_features)

    def forward(self, x):
        # Earlier reshaping attempts, kept commented out for reference:
        #x = x.flatten(start_dim=1)  # Reshape the input tensor
        #x = x.view(x.size(0), -1)  # Flatten the input tensor
        #x = x.reshape(x.size(0), -1)
        x = self.linear(x)
        return x

adapter = Adapter(512, 3072).cuda()
```
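
As a quick sanity check (a sketch on CPU with a hypothetical input, not the real pipeline), this adapter only accepts inputs whose last dimension is 512:

```
import torch

# Using the Adapter class above: its nn.Linear(512, 3072) layer requires
# the last input dimension to be 512, regardless of the batch size.
adapter_cpu = Adapter(512, 3072)            # CPU copy for this check
dummy = torch.randn(128, 512)               # what the adapter expects
print(adapter_cpu(dummy).shape)             # torch.Size([128, 3072])
```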


The error is: "mat1 and mat2 shapes cannot be multiplied (128x1 and 512x3072)".
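
The shapes in the message match the code above: mat1 is N1's output, which has shape (128, 1) because its last layer is `nn.Linear(512*block.expansion, 1)`, while mat2 comes from the adapter's linear layer, which was built for 512 input features (`Adapter(512, 3072)`). One possible fix, shown as a sketch rather than a definitive implementation, is to feed the adapter the 512-dim pooled features from just before N1's final `self.linear` instead of the 1-dim output; it assumes `resnet` is an instance of the ResNet class above and that `block.expansion == 1`:

```
import torch
import torch.nn.functional as F

def extract_features(resnet, x):
    # Re-runs N1's forward pass up to, but not including, its final
    # self.linear layer, so the result keeps the 512*expansion feature
    # width that Adapter(512, 3072) expects.
    out = F.relu(resnet.bn1(resnet.conv1(x)))
    out = resnet.layer1(out)
    out = resnet.layer2(out)
    out = resnet.layer3(out)
    out = resnet.layer4(out)
    out = F.avg_pool2d(out, 4)
    return out.view(out.size(0), -1)        # (batch, 512*expansion)

# Hypothetical usage:
# feats = extract_features(n1, images)      # (128, 512)
# n2_input = adapter(feats)                 # (128, 3072), matches N2's fc1
```

Alternatively, if N1 really is meant to emit a single value per sample, the adapter would need to be constructed with a matching input width, e.g. `Adapter(1, 3072)`.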