I am trying to define a custom leaky_relu function based on autograd, but the code raises "function MyReLUBackward returned an incorrect number of gradients (expected 2, got 1)". Can you give me some advice?
Thank you so much for your help.
The code is shown below:
import torch
class MyReLU(torch.autograd.Function):
    @staticmethod
    def forward(ctx, input, negative_slope):
        # leaky ReLU: identity for positive inputs, scaled by the
        # slope for negative ones
        output = input.clamp(min=0) + input.clamp(max=0) * negative_slope
        ctx.save_for_backward(input)
        return output

    @staticmethod
    def backward(ctx, grad_output):
        input, = ctx.saved_tensors
        negative_slope, = ctx.saved_tensors
        grad_input = grad_output.clone()
        return grad_input
dtype = torch.float
device = torch.device("cpu")

N, D_in, H, D_out = 64, 1000, 100, 10
x = torch.randn(N, D_in, device=device, dtype=dtype)
y = torch.randn(N, D_out, device=device, dtype=dtype)
w1 = torch.randn(D_in, H, device=device, dtype=dtype, requires_grad=True)
w2 = torch.randn(H, D_out, device=device, dtype=dtype, requires_grad=True)
learning_rate = 1e-6
for t in range(500):
    relu = MyReLU.apply
    y_pred = relu(x.mm(w1), 0.01).mm(w2)
    loss = (y_pred - y).pow(2).sum()
    if t % 100 == 99:
        print(t, loss.item())
    loss.backward()
    with torch.no_grad():
        w1 -= learning_rate * w1.grad
        w2 -= learning_rate * w2.grad
        w1.grad.zero_()
        w2.grad.zero_()
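
From the error message, I guess backward has to return one gradient for each argument that forward received, so with the extra negative_slope argument it expects two return values. Is something like the sketch below the right direction? It keeps the names from my snippet, assumes negative_slope is a plain Python float (so it is stashed directly on ctx rather than through save_for_backward, which only accepts tensors), applies the leaky-ReLU derivative, and returns None for the slope since it needs no gradient:

import torch

class MyReLU(torch.autograd.Function):
    @staticmethod
    def forward(ctx, input, negative_slope):
        ctx.save_for_backward(input)
        # a plain float can be stored directly on ctx
        ctx.negative_slope = negative_slope
        return input.clamp(min=0) + input.clamp(max=0) * negative_slope

    @staticmethod
    def backward(ctx, grad_output):
        input, = ctx.saved_tensors
        grad_input = grad_output.clone()
        # leaky-ReLU derivative: 1 where input > 0, negative_slope elsewhere
        grad_input[input < 0] *= ctx.negative_slope
        # one gradient per forward argument; None for the non-tensor slope
        return grad_input, None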