In-place operation error with activation function

My code looks like this:

import torch.nn as nn

class Net(nn.Module):
    def __init__(self, upscale_factor):
        super(Net, self).__init__()

        # self.activation = nn.ReLU()
        # self.gain = "relu"
        self.activation = nn.Tanh()
        self.gain = "tanh"
        self.image_conv1 = nn.Conv2d(5, 20, 5, padding=2)
        self.image_conv2 = nn.Conv2d(20, 20, 5, padding=2)
        self.low_conv1 = nn.Conv2d(5, 15, 3, padding=1)

    def forward(self, image):
        image_feature_1 = self.activation(self.image_conv1(image))
        image_feature_2 = self.activation(self.image_conv2(image_feature_1))
        return image_feature_2

When I use the Tanh function, it throws this error:

RuntimeError: one of the variables needed for gradient computation has been modified by an inplace operation

However, when I use the ReLU function, everything works fine.

Is there any difference between these two activation functions?

Could you modify your code so that it actually throws the error?
I tried your code and it runs just fine:

import torch
import torch.nn as nn

class Net(nn.Module):
    def __init__(self):
        super(Net, self).__init__()
        self.activation = nn.Tanh()
        self.image_conv1 = nn.Conv2d(5, 20, 5, padding=2)
        self.image_conv2 = nn.Conv2d(20, 20, 5, padding=2)
        
    def forward(self, x):
        x = self.image_conv1(x)
        x = self.activation(x)
        x = self.image_conv2(x)
        x = self.activation(x)
        return x

model = Net()
x = torch.randn(1, 5, 24, 24)
output = model(x)
output.mean().backward()
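
In general, this error means that a tensor autograd saved for the backward pass was modified in place (its version counter changed) before backward() ran. nn.Tanh is a common place to see it because its backward uses its saved output: the derivative of tanh(x) is 1 - tanh(x)^2. Any later in-place edit of that output therefore invalidates the gradient computation. Here is a minimal sketch that reproduces the message; the y.add_(1.0) line is an illustrative in-place op I added, not something from your snippet:

import torch

x = torch.randn(1, 5, requires_grad=True)
y = torch.tanh(x)   # autograd saves y: tanh's backward computes grad * (1 - y**2)
y.add_(1.0)         # in-place edit bumps y's version counter
y.sum().backward()  # RuntimeError: one of the variables needed for gradient
                    # computation has been modified by an inplace operation

Since the snippet you posted has no in-place op, the Tanh/ReLU difference you observe most likely comes from an in-place operation elsewhere in your full model. To locate it, enable torch.autograd.set_detect_anomaly(True) before the forward pass; the traceback it produces points at the forward op that created the modified tensor. Also look for inplace=True activations and augmented assignments like x += ... between an activation and the loss.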