Custom Clipped ReLU Activation

Dear All,
Here is my code for a clipped ReLU. Did I make a mistake? I am not sure about the backward part.
import torch

class MyReLU(torch.autograd.Function):
    @staticmethod
    def forward(ctx, input):
        # Save the original (unclipped) input so backward can see which values were clipped.
        ctx.save_for_backward(input)
        # clamp() is out-of-place: it returns a new tensor, so return its result
        # rather than an unclipped clone of the input.
        return input.clamp(min=0, max=1)

    @staticmethod
    def backward(ctx, grad_output):
        input, = ctx.saved_tensors
        grad_input = grad_output.clone()
        # The gradient is zero wherever the forward pass clipped the input.
        grad_input[input < 0] = 0
        grad_input[input > 1] = 0
        return grad_input
dtype = torch.float
device = torch.device("cpu")
relu = MyReLU.apply  # call apply on the class; no instance is needed
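
Since the backward part is the uncertain bit, here is a minimal sketch of how one might sanity-check it with torch.autograd.gradcheck, assuming the MyReLU class above. gradcheck compares the analytic backward against numerical gradients and needs double-precision inputs; the clip points 0 and 1 are not differentiable, so random inputs (which almost never land exactly on them) are used here.

# Gradient check of the custom backward (double precision required).
x = torch.randn(8, dtype=torch.double, requires_grad=True)
print(torch.autograd.gradcheck(MyReLU.apply, (x,)))  # expect: True

# Quick forward sanity check: values below 0 and above 1 should be clipped.
y = relu(torch.tensor([-0.5, 0.3, 1.7], dtype=dtype, device=device))
print(y)  # tensor([0.0000, 0.3000, 1.0000])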