"leaf variable was used in an inplace operation: in autograd func. with multiple inputs

I am writing a piece of code to better understand how autograd and the PyTorch pipeline work in general. When I define a new layer using the torch.autograd.Function class and pass it 4 inputs, 3 of them as nn.Parameters, I get a "RuntimeError: leaf variable was used in an inplace operation" error. When I call the same class with only 2 of the inputs as nn.Parameters (the last one as a plain Variable), I get the correct output.

This is the layer I am defining in its simplest form:

import torch
import torch.nn as nn
from torch.autograd import Variable, gradcheck


class ExpRecR_(torch.autograd.Function):

    @staticmethod
    def forward(ctx, input, p1, p2, p3):
        # stash the inputs as attributes on ctx
        # (instead of using ctx.save_for_backward)
        ctx.input = input
        ctx.p1 = p1
        ctx.p2 = p2
        ctx.p3 = p3

        # mark every input as modified in place, even though
        # forward never actually mutates any of them
        ctx.mark_dirty(input)
        ctx.mark_dirty(p1)
        ctx.mark_dirty(p2)
        ctx.mark_dirty(p3)

        # dummy scalar output, just to have something to differentiate
        loss = 0
        return torch.Tensor([loss])

    @staticmethod
    def backward(ctx, grad_output):
        # retrieve the inputs stashed in forward
        input = ctx.input
        p1 = ctx.p1
        p2 = ctx.p2
        p3 = ctx.p3

        # dummy zero gradients for p1, p2, p3; no gradient for input
        out = torch.zeros(1, 1)
        out_p2 = torch.zeros(1, 1)
        out_p3 = torch.zeros(1, 1)
        return None, Variable(out) * grad_output, Variable(out_p2) * grad_output, Variable(out_p3) * grad_output
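
For reference, this is the pattern I understand the docs to recommend for saving inputs, using ctx.save_for_backward instead of stashing them as attributes on ctx (a minimal sketch with made-up names, not the failing code above):

class SaveSketch(torch.autograd.Function):

    @staticmethod
    def forward(ctx, input, p1):
        # register the inputs with autograd's bookkeeping instead of
        # hiding them in attributes on ctx
        ctx.save_for_backward(input, p1)
        return torch.Tensor([0])

    @staticmethod
    def backward(ctx, grad_output):
        # saved tensors come back as a tuple
        # (ctx.saved_variables on the 0.3-era versions in my traceback)
        input, p1 = ctx.saved_tensors
        return None, Variable(torch.zeros(1, 1)) * grad_output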

I check this layer using torch.autograd.gradcheck:

test = gradcheck(ExpRecR_.apply, (Variable(timestamp_pt),
                                  nn.Parameter(-torch.ones(1, 1) * 0.2),
                                  nn.Parameter(torch.ones(1, 1) * 0.2),
                                  nn.Parameter(torch.ones(1, 1) * 0.1)), eps=1e-3, atol=1e-4)

This produces the following error:

RuntimeError                              Traceback (most recent call last)
<ipython-input-144-b5c59cb4deb3> in <module>()
      2                                  nn.Parameter(-torch.ones(1, 1)*0.2),
      3                                  nn.Parameter(torch.ones(1, 1)*0.2),
----> 4                                  nn.Parameter(torch.ones(1, 1)*0.1)) , eps=1e-3, atol=1e-4)

~/anaconda/envs/py36/lib/python3.6/site-packages/torch/autograd/gradcheck.py in gradcheck(func, inputs, eps, atol, rtol, raise_exception)
    174             return _as_tuple(func(*input))[i].data
    175 
--> 176         analytical, reentrant, correct_grad_sizes = get_analytical_jacobian(_as_tuple(inputs), o)
    177         numerical = get_numerical_jacobian(fn, inputs, inputs, eps)
    178 

~/anaconda/envs/py36/lib/python3.6/site-packages/torch/autograd/gradcheck.py in get_analytical_jacobian(input, output)
    108         for jacobian_c in (jacobian, jacobian_reentrant):
    109             zero_gradients(input)
--> 110             output.backward(grad_output, create_graph=True)
    111             for jacobian_x, (d_x, x) in zip(jacobian_c, iter_variables(input)):
    112                 if d_x is None:

~/anaconda/envs/py36/lib/python3.6/site-packages/torch/autograd/variable.py in backward(self, gradient, retain_graph, create_graph, retain_variables)
    165                 Variable.
    166         """
--> 167         torch.autograd.backward(self, gradient, retain_graph, create_graph, retain_variables)
    168 
    169     def register_hook(self, hook):

~/anaconda/envs/py36/lib/python3.6/site-packages/torch/autograd/__init__.py in backward(variables, grad_variables, retain_graph, create_graph, retain_variables)
     97 
     98     Variable._execution_engine.run_backward(
---> 99         variables, grad_variables, retain_graph)
    100 
    101 

RuntimeError: leaf variable was used in an inplace operation
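
I suspect the mark_dirty calls rather than the number of inputs, because a plain in-place operation on a leaf that requires grad reproduces a very similar error (a minimal sketch of my guess; w is just a throwaway name):

w = Variable(torch.ones(1, 1), requires_grad=True)  # a leaf that requires grad
w.add_(1.0)  # raises a similar RuntimeError about a leaf used in an in-place operation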

When I use the layer like this:

gradcheck(ExpRecR_.apply, (Variable(timestamp_pt),
                           nn.Parameter(-torch.ones(1, 1) * 0.2),
                           nn.Parameter(torch.ones(1, 1) * 0.2),
                           Variable(torch.ones(1, 1) * 0.1)), eps=1e-3, atol=1e-4)

I get True, which is the correct output.
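
As far as I can tell, the only difference between the failing and the passing call is the requires_grad flag on the fourth argument (a quick check; p3_param and p3_var are just my own names):

p3_param = nn.Parameter(torch.ones(1, 1) * 0.1)  # leaf, requires_grad=True
p3_var = Variable(torch.ones(1, 1) * 0.1)        # leaf, requires_grad=False by default
print(p3_param.requires_grad, p3_var.requires_grad)  # prints: True False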

Can someone help me clarify why I get this error? Is there a limit on how many inputs the layer can have?