Extract features and outputs from InceptionV3 using a custom class

I would like to implement a custom class to modify the fully connected layers of Inception V3 and extract outputs and features, similar to this FCResNet50 class:

import torch.nn as nn
from torchvision.models import resnet50

class FCResNet50(nn.Module):
    def __init__(self, num_classes=2, pretrained=True, hidden_size=2048, dropout=0.5):
        super().__init__()
        self.resnet = resnet50(pretrained=pretrained)
        # Replace the original 2048 -> 1000 classifier so the backbone outputs hidden_size features
        self.resnet.fc = nn.Linear(2048, hidden_size)
        # Custom head mapping those features to the final class scores
        self.fc = nn.Linear(hidden_size, num_classes)
        self.relu = nn.ReLU()
        self.dropout = nn.Dropout(dropout)

    def require_all_grads(self):
        for param in self.parameters():
            param.requires_grad = True

    def forward(self, x):
        features = self.resnet(x)
        outputs = self.fc(self.dropout(self.relu(features)))

        return outputs, features
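For reference, the class returns both the class scores and the hidden features. A quick shape check (a minimal sketch, assuming 224x224 inputs and pretrained=False just to avoid the weight download):

import torch

model = FCResNet50(num_classes=2, pretrained=False)
model.eval()
outputs, features = model(torch.randn(2, 3, 224, 224))
print(outputs.shape)   # torch.Size([2, 2])
print(features.shape)  # torch.Size([2, 2048])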

Below is what I have written so far, but I’m not sure how to complete the forward function. Any guidance would be greatly appreciated. Thank you.

import torch.nn as nn
import torchvision.models as models

class FCInceptionV3(nn.Module):
    def __init__(self, num_classes=2, pretrained=True, hidden_size=2048, dropout=0.5):
        super().__init__()
        self.inception = models.inception_v3(pretrained=pretrained)
        
        # Modify the final fully connected layer of the InceptionV3 model
        num_ftrs = self.inception.fc.in_features
        self.inception.fc = nn.Linear(num_ftrs, hidden_size)
        
        # Add a custom fully connected layer for the primary net
        self.inception.fc = nn.Linear(num_ftrs, 2)
        self.relu = nn.ReLU()
        self.dropout = nn.Dropout(dropout)
  

    def require_all_grads(self):
        for param in self.parameters():
            param.requires_grad = True

    def forward(self, x):
        # Forward pass through the inception model
        outputs = self.inception(x)

        return outputs

I’m unsure where exactly you are stuck, as you should be able to use the same approach you’ve applied to the ResNet:
i.e. replace the .fc layer with a custom one (note you are currently assigning it twice, so the second nn.Linear overwrites the first) and call the additional layers (relu, dropout, fc) on its output in forward.

Thank you for your reply!

I’ve tried this, following the ResNet approach:

import torch.nn as nn
import torchvision.models as models

class FCInceptionV3(nn.Module):
    def __init__(self, num_classes=2, pretrained=True, hidden_size=2048, dropout=0.5):
        super().__init__()
        self.inception = models.inception_v3(pretrained=pretrained)
        
        # Modify the final fully connected layer of the InceptionV3 model
        num_ftrs = self.inception.fc.in_features
        self.fc = nn.Linear(num_ftrs, hidden_size)
        
        # Add a custom fully connected layer for the primary net
        self.fc = nn.Linear(hidden_size, 2)
        self.relu = nn.ReLU()
        self.dropout = nn.Dropout(dropout)
  

    def require_all_grads(self):
        for param in self.parameters():
            param.requires_grad = True

    def forward(self, x):
        # Forward pass through the inception model
        features = self.inception(x)
        outputs = self.fc(self.dropout(self.relu(features)))

        return outputs, features

But I got this error: TypeError: relu(): argument ‘input’ (position 1) must be Tensor, not InceptionOutputs
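Printing the return type shows where this comes from: in training mode (with the default aux_logits=True), inception_v3 returns an InceptionOutputs namedtuple rather than a plain tensor. A minimal check (using pretrained=False here just to avoid the weight download):

import torch
import torchvision.models as models

inception = models.inception_v3(pretrained=False)
inception.train()
out = inception(torch.randn(2, 3, 299, 299))
print(type(out).__name__)  # InceptionOutputs
print(out._fields)         # ('logits', 'aux_logits')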

Swapping the forward function as follows seemed to fix the previous error, but resulted in another one:

def forward(self, x):
    # Forward pass through the inception model
    features = self.inception(x)

    # Apply ReLU activation to individual tensors within features
    features = [self.relu(feature) for feature in features]

    # Process the last feature map
    last_feature_map = features[-1]

    outputs = self.fc(self.dropout(self.relu(last_feature_map)))

    return outputs, features

RuntimeError: mat1 and mat2 shapes cannot be multiplied (2x1000 and 2048x2)

Index the output logits via outputs.logits and the TypeError should disappear. The shape mismatch is a separate issue: in your __init__ you assign self.fc twice instead of replacing self.inception.fc, so the backbone still ends in its original 2048 -> 1000 classifier (hence the 2x1000 activation), while your custom head expects hidden_size=2048 input features.
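Putting both fixes together, something along these lines should work (a sketch keeping your class name and arguments; the .logits indexing is only needed in training mode, where the namedtuple is returned):

import torch
import torch.nn as nn
import torchvision.models as models

class FCInceptionV3(nn.Module):
    def __init__(self, num_classes=2, pretrained=True, hidden_size=2048, dropout=0.5):
        super().__init__()
        self.inception = models.inception_v3(pretrained=pretrained)
        # Replace the backbone classifier exactly once: 2048 -> hidden_size
        self.inception.fc = nn.Linear(self.inception.fc.in_features, hidden_size)
        # Custom head: hidden_size -> num_classes
        self.fc = nn.Linear(hidden_size, num_classes)
        self.relu = nn.ReLU()
        self.dropout = nn.Dropout(dropout)

    def require_all_grads(self):
        for param in self.parameters():
            param.requires_grad = True

    def forward(self, x):
        out = self.inception(x)
        # In training mode inception_v3 returns an InceptionOutputs
        # namedtuple (logits, aux_logits); index the main logits then
        features = out.logits if self.training else out
        outputs = self.fc(self.dropout(self.relu(features)))
        return outputs, features

# Quick shape check; inception_v3 expects 299x299 inputs
model = FCInceptionV3(num_classes=2, pretrained=False)
model.train()
outputs, features = model(torch.randn(2, 3, 299, 299))
print(outputs.shape, features.shape)  # torch.Size([2, 2]) torch.Size([2, 2048])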