Does the residual connection break when calling ResNet submodules separately?

My question is: does the residual connection break here? My concern is whether I need to add the residual connections back manually so that this behaves like resnet18.

import torch
import torch.nn as nn
import torch.nn.functional as F
from torchvision import models

# SEBlock and SpatialAttention are custom modules defined elsewhere
class preprocessor(nn.Module):
    def __init__(self, num_channels=3):
        super(preprocessor, self).__init__()

        self.conv1 = nn.Conv2d(num_channels, 192, 1)  # defined but not used in forward
        self.resnet = models.resnet18(pretrained=True)
        self.resnet.aux_logits = False
        self.conv2 = nn.Conv2d(64, 1, 1)
        self.dropout = nn.Dropout2d(p=0.5, inplace=False)
        self.seb = SEBlock(64)
        self.sp = SpatialAttention(7)

    def forward(self, x):
        identity = x[:, 2:3, :, :]  # keep the third input channel as a skip path
        b, c, h, w = x.shape

        # resnet18 stem, called submodule by submodule
        x = self.resnet.conv1(x)
        x = self.resnet.bn1(x)
        x = self.resnet.maxpool(x)

        # layer1: the two BasicBlocks, also called submodule by submodule
        x = self.resnet.layer1[0].conv1(x)
        x = self.resnet.layer1[0].bn1(x)
        x = self.resnet.layer1[0].conv2(x)
        x = self.resnet.layer1[0].bn2(x)
        x = self.resnet.layer1[1].conv1(x)
        x = self.resnet.layer1[1].bn1(x)
        x = self.resnet.layer1[1].conv2(x)
        x = self.resnet.layer1[1].bn2(x)

        x = self.dropout(x)
        spa = self.sp(x)
        x = x + spa
        x = self.seb(x)
        x = F.interpolate(self.conv2(x), (h, w))  # upsample back to the input size
        x = identity + x  # skip connection from the input to the output
        return x

I don't fully understand the question, since your code already contains x = identity + x, which can be seen as a residual connection. Also note that resnet18 does not have an aux_logits attribute (that argument belongs to the Inception models), so self.resnet.aux_logits = False simply creates a new attribute and initializes it to False; it has no effect on the model.
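
For reference, each BasicBlock in torchvision's resnet18 applies its skip connection inside its own forward, so stepping through conv1/bn1/conv2/bn2 by hand, as in your snippet, bypasses both the in-block additions and the ReLUs. Here is a minimal sketch (paraphrasing torchvision's BasicBlock.forward) that shows what calling a block as a module actually computes:

import torch
from torchvision import models

resnet = models.resnet18(pretrained=True).eval()
block = resnet.layer1[0]  # first BasicBlock of layer1

x = torch.randn(1, 64, 56, 56)  # layer1 expects 64 input channels

with torch.no_grad():
    # what BasicBlock.forward does internally (paraphrased from torchvision)
    identity = x
    out = block.conv1(x)
    out = block.bn1(out)
    out = block.relu(out)             # this ReLU is skipped in the manual version
    out = block.conv2(out)
    out = block.bn2(out)
    if block.downsample is not None:  # None in layer1; used when shapes change
        identity = block.downsample(x)
    out = out + identity              # the in-block skip connection, also skipped
    out = block.relu(out)

    # calling the block as a module runs exactly this, residual included
    assert torch.allclose(out, block(x), atol=1e-6)

So if you want to keep resnet18's internal residuals while still adding dropout and attention afterwards, you can call the blocks (or the whole layer) as modules, e.g. x = self.resnet.layer1(x); the x = identity + x at the end of your forward is a separate skip from the input to the output, not a replacement for the in-block ones.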