Inserting a dropout layer: is this right?

Hi guys, I want to insert a dropout layer, but I have some doubts.
Is my forward right?

import torch
import torch.nn as nn
from torchvision import models
from torchvision.models.segmentation import fcn_resnet50


class Identity(nn.Module):
    def __init__(self):
        super().__init__()

    def forward(self, x):
        return x


class Compound_Model_final(nn.Module):

    def __init__(self):
        super().__init__()

        # Load the models trained from scratch for the first branch
        # (_resnet_shallow is a user-defined helper)
        self.model1 = fcn_resnet50(num_classes=9)
        self.model2 = _resnet_shallow(pretrained=False)

        # Load the pretrained resnet50 for the second branch
        self.model3 = models.resnet50(pretrained=True)
        #self.model3.fc = nn.Linear(self.model3.fc.in_features, 4)
        #self.model3.load_state_dict(torch.load("Regressione_ResNet50_cartesian_angle_M95.pth", map_location='cpu'))

        # Get the number of input features of each fc layer
        n_features_x = self.model2.fc.in_features
        n_features_y = self.model3.fc.in_features

        # Dropout layer
        self.dropout = nn.Dropout(p=0.5)

        # New linear head that replaces the removed fc layers
        self.fc = nn.Linear(n_features_x + n_features_y, 4)

        # Replace the fc layers with Identity so the backbones return raw features
        self.model2.fc = Identity()
        self.model3.fc = Identity()

    def forward(self, x):
        # The first branch is executed
        dist = self.model1(x)['out']
        model2_out = self.model2(dist)

        # The second branch too
        model3_out = self.model3(x)

        # Concatenate the feature tensors
        z = torch.cat([model2_out, model3_out], dim=1)

        # Apply the linear layer
        xyuv = self.fc(z)

        return dist, xyuv

You have to call dropout in the forward pass.
Whatever you don't call in the forward pass won't be computed.

Yes, I'll add z = self.dropout(z) before the linear layer.