How to add two regressor blocks as the last layer in DenseNet

I want to replace the last layer of densenet161 with two regressor blocks, each containing a ReLU-activated hidden layer of 256 units.

Can anyone suggest how to do this replacement?

Maybe you can try something like this:

import torch
import torch.nn as nn
import torchvision


class MyModel(nn.Module):
    def __init__(self):
        super(MyModel, self).__init__()
        # keep everything up to (but not including) the final classifier;
        # densenet169's feature block outputs 1664 channels (densenet161 would give 2208)
        self.densenet = nn.Sequential(*list(torchvision.models.densenet169(pretrained=True).children())[:-1])
        self.avg_pool = nn.AdaptiveAvgPool2d((1, 1))
        self.res_blocks = nn.Sequential(
            nn.Linear(1664, 256),
            nn.ReLU(inplace=True),
            nn.Linear(256, 256),
            nn.ReLU(inplace=True))

    def forward(self, x):
        # suppose the shape of x is (batch, 3, 224, 224)
        x = self.densenet(x)           # shape is (batch, 1664, 7, 7)
        x = torch.relu(x)              # the stock DenseNet forward applies a ReLU after the feature block
        x = self.avg_pool(x)           # shape is (batch, 1664, 1, 1)
        x = x.reshape(x.shape[0], -1)  # shape is (batch, 1664)
        return self.res_blocks(x)
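
You can sanity-check the shapes with a random input (just a quick sketch, assuming the imports above):

model = MyModel()
model.eval()

dummy = torch.randn(2, 3, 224, 224)  # random batch of 2 RGB images
with torch.no_grad():
    out = model(dummy)
print(out.shape)  # torch.Size([2, 256])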

But each block should be connected to its own output unit.
That is, each of the two regressor blocks, containing a ReLU-activated hidden layer of 256 units, should continue to an output unit.
So two outputs are expected.

Okay… do you mean like this?

# same imports as above
class MyModel(nn.Module):
    def __init__(self):
        super(MyModel, self).__init__()
        # keep everything up to (but not including) the final classifier
        self.densenet = nn.Sequential(*list(torchvision.models.densenet169(pretrained=True).children())[:-1])
        self.avg_pool = nn.AdaptiveAvgPool2d((1, 1))
        # two independent regressor blocks, each a 256-unit ReLU hidden
        # layer followed by a single output unit
        self.reg_block1 = nn.Sequential(
            nn.Linear(1664, 256),
            nn.ReLU(inplace=True),
            nn.Linear(256, 1))
        self.reg_block2 = nn.Sequential(
            nn.Linear(1664, 256),
            nn.ReLU(inplace=True),
            nn.Linear(256, 1))

    def forward(self, x):
        # suppose the shape of x is (batch, 3, 224, 224)
        x = self.densenet(x)           # shape is (batch, 1664, 7, 7)
        x = torch.relu(x)              # ReLU applied after the feature block, as in the stock forward
        x = self.avg_pool(x)           # shape is (batch, 1664, 1, 1)
        x = x.reshape(x.shape[0], -1)  # shape is (batch, 1664)
        out1 = self.reg_block1(x)      # shape is (batch, 1)
        out2 = self.reg_block2(x)      # shape is (batch, 1)
        return out1, out2
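
If that matches what you want, a training step could look roughly like this (just a sketch; inputs, target1, and target2 are hypothetical placeholders for your own data):

model = MyModel()
criterion = nn.MSELoss()
optimizer = torch.optim.Adam(model.parameters(), lr=1e-4)

# hypothetical batch: 4 images with two scalar regression targets each
inputs = torch.randn(4, 3, 224, 224)
target1 = torch.randn(4, 1)
target2 = torch.randn(4, 1)

out1, out2 = model(inputs)
# sum the two regression losses so both heads are trained jointly
loss = criterion(out1, target1) + criterion(out2, target2)

optimizer.zero_grad()
loss.backward()
optimizer.step()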

Maybe that's the same thing, thank you.
I will check with this.