Imposing a loss on intermediate activations of a ResNet

From @ptrblck's answer here, I know that intermediate outputs can be used to compute a loss. However, if I swap the simple model there for a ResNet (which has more complicated operations such as skip connections), autograd raises the following error:

RuntimeError: one of the variables needed for gradient computation has been modified by an inplace operation: [torch.FloatTensor [1, 512, 4, 4]], which is output 0 of ReluBackward0, is at version 1; expected version 0 instead. Hint: enable anomaly detection to find the operation that failed to compute its gradient, with torch.autograd.set_detect_anomaly(True).

Minimal code for this:

import torch
import torch.nn as nn
import torch.nn.functional as F
import torch.optim as optim

def conv3x3(in_planes, out_planes, stride=1):
    return nn.Conv2d(in_planes, out_planes, kernel_size=3, stride=stride, padding=1, bias=False)


class BasicBlock(nn.Module):
    expansion = 1

    def __init__(self, in_planes, planes, stride=1):
        super(BasicBlock, self).__init__()
        self.conv1 = conv3x3(in_planes, planes, stride)
        self.bn1 = nn.BatchNorm2d(planes)
        self.conv2 = conv3x3(planes, planes)
        self.bn2 = nn.BatchNorm2d(planes)

        self.shortcut = nn.Sequential()
        if stride != 1 or in_planes != self.expansion*planes:
            self.shortcut = nn.Sequential(
                nn.Conv2d(in_planes, self.expansion*planes, kernel_size=1, stride=stride, bias=False),
                nn.BatchNorm2d(self.expansion*planes)
            )

    def forward(self, x):
        out = F.relu(self.bn1(self.conv1(x)))
        out = F.relu(self.bn2(self.conv2(out)))
        out += self.shortcut(x) if self.shortcut else x
        out = F.relu(out)
        return out

class ResNet(nn.Module):
    def __init__(self, block, num_blocks, num_classes=10, return_feat=False):
        super(ResNet, self).__init__()
        self.in_planes = 64

        self.conv1 = conv3x3(3,64)
        self.bn1 = nn.BatchNorm2d(64)
        self.layer1 = self._make_layer(block, 64, num_blocks[0], stride=1)
        self.layer2 = self._make_layer(block, 128, num_blocks[1], stride=2)
        self.layer3 = self._make_layer(block, 256, num_blocks[2], stride=2)
        self.layer4 = self._make_layer(block, 512, num_blocks[3], stride=2)
        self.linear = nn.Linear(512*block.expansion, num_classes)
        self.return_feat = return_feat

    def _make_layer(self, block, planes, num_blocks, stride):
        strides = [stride] + [1]*(num_blocks-1)
        layers = []
        for stride in strides:
            layers.append(block(self.in_planes, planes, stride))
            self.in_planes = planes * block.expansion
        return nn.Sequential(*layers)

    def forward(self, x):
        out = F.relu(self.bn1(self.conv1(x)))
        out = self.layer1(out)
        out = self.layer2(out)
        out = self.layer3(out)
        out = self.layer4(out)
        layer4_out = out
        out = F.avg_pool2d(out, 4)
        out = out.view(out.size(0), -1)
        out = self.linear(out)
        if self.return_feat:
            return out, layer4_out
        else:
            return out

def ResNet18(num_classes=10, return_feat=False):
    return ResNet(BasicBlock, [2,2,2,2], num_classes, return_feat)


x = torch.randn(1, 3, 32, 32)
model = ResNet18(return_feat=True)
output, aux = model(x)
loss = (aux**2).mean()
loss.backward()

Any idea which operation is the in-place one that breaks the graph?
Thanks
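
For reference, the anomaly detection mentioned in the error hint can be wrapped around the failing calls above, so that the backward error also prints a traceback for the forward operation that created the problematic tensor (a minimal sketch, using torch.autograd.set_detect_anomaly as a context manager):

# Sketch: re-run the failing forward/backward with anomaly detection enabled.
# The backward error will then include a traceback pointing at the forward-pass
# line that produced the tensor later modified in place.
with torch.autograd.set_detect_anomaly(True):
    output, aux = model(x)
    loss = (aux**2).mean()
    loss.backward()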

I don’t believe this issue is related to the usage of intermediate activations, as your code also fails when the final output is used:

x = torch.randn(1, 3, 32, 32)
model = ResNet18(return_feat=True)
output, aux = model(x)
loss = (output**2).mean()
#loss = (aux.clone()**2).mean()
loss.backward()

which points to:

out += self.shortcut(x) if self.shortcut else x

Since F.relu saves its output for the backward pass (that is the ReluBackward0 mentioned in the error), modifying out in place on this line invalidates the saved tensor. You could replace the operation with the out-of-place version:

out = out + self.shortcut(x) if self.shortcut else x

which seems to work.
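
To see the failure mode in isolation: because ReLU's backward needs the ReLU output, any in-place op on that output breaks the graph. A tiny standalone sketch (independent of the model above) that raises the same error:

a = torch.randn(3, requires_grad=True)
b = torch.relu(a)     # ReLU saves its output for the backward pass
b += 1                # the in-place op bumps the tensor's version counter
b.sum().backward()    # fails with the same "modified by an inplace operation" error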

Thanks a lot, that worked. Just a minor edit to the solution for others having a similar issue:

out = out + self.shortcut(x) if self.shortcut else (out + x)

i.e. the else branch also needs to add out; without this, Python’s conditional-expression precedence parses the original suggestion as out = (out + self.shortcut(x)) if self.shortcut else x, which drops out entirely when the shortcut is empty.
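
Putting both posts together, the corrected BasicBlock.forward would read roughly as follows (a sketch of the fix discussed above, with the conditional parenthesized for clarity):

def forward(self, x):
    out = F.relu(self.bn1(self.conv1(x)))
    out = F.relu(self.bn2(self.conv2(out)))
    # out-of-place addition, so the ReLU output saved for backward is left untouched
    out = (out + self.shortcut(x)) if self.shortcut else (out + x)
    out = F.relu(out)
    return out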