Hi, here is a minimal sample code that can reproduce the error. Please check it.

I find out that the error is related to at `out = out + self.shortcut(x)`

when `len(self.shortcut) == 0`

and `out`

is outputed by the inplace op, what’s the reason behind it?

```
class BasicBlock(nn.Module):
expansion = 1
def __init__(self, in_planes, planes, stride=1):
super(BasicBlock, self).__init__()
self.conv1 = nn.Conv2d(in_planes, planes, kernel_size=3, stride=stride, padding=1, bias=False)
self.bn1 = nn.BatchNorm2d(planes)
self.conv2 = nn.Conv2d(planes, planes, kernel_size=3, stride=1, padding=1, bias=False)
self.shortcut = nn.Sequential()
if stride != 1 or in_planes != self.expansion*planes:
self.shortcut = nn.Sequential(
nn.Conv2d(in_planes, self.expansion*planes, kernel_size=1, stride=stride, bias=False),
nn.BatchNorm2d(self.expansion*planes)
)
self.bn2 = nn.BatchNorm2d(planes)
else:
#self.n2 = nn.BatchNorm2d(planes) #use name `n2` to keep it `nn.BatchNorm2d` when `len(self.shortcut) == 0`
#can avoid the gradient error, but why?
self.bn2 = nn.BatchNorm2d(planes) #in this case, 'bn2' will be repaced with `my_func*` when `len(self.shortcut) == 0`
# and will cause the gradient error.
def forward(self, x):
out = self.conv1(x)
out = self.bn1(out)
out = self.conv2(out)
if len(self.shortcut):
out = self.bn2(out)
else:
out = self.bn2(out)
#out = self.n2(out)
out = out + self.shortcut(x)
out = F.relu(out, inplace=True)
return out
```