Access the weights of manually implemented backward via Autograd

Hello,

I would like to print the weights of a specific layer that is implemented manually with autograd. It is called my_autograd_function().

I tried
print(net.my_autograd_function.weight), but it doesn't work: "net.my_autograd_function" is not recognized. Here is my code:

class my_autograd_function(torch.autograd.Function):
    """Custom autograd op computing y = W @ x with a hand-written backward.

    NOTE: ``torch.autograd.Function`` requires ``@staticmethod`` forward and
    backward taking a ``ctx`` object (the legacy instance-style API with
    ``self`` was removed from PyTorch). Invoke it with
    ``my_autograd_function.apply(W, x)`` — never by instantiating it.

    A Function holds no parameters of its own, which is why
    ``net.my_autograd_function.weight`` cannot exist: store ``W`` as an
    ``nn.Parameter`` on the owning module and print that instead.
    """

    @staticmethod
    def forward(ctx, W, x):
        # Stash both inputs; backward needs them to form the gradients.
        ctx.save_for_backward(W, x)
        return torch.mm(W, x)

    @staticmethod
    def backward(ctx, grad_output):
        W, x = ctx.saved_tensors
        grad_out = grad_output.clone()
        # Gradients must be returned in the same order as forward's inputs.
        grad_W = torch.mm(grad_out, x.t())   # dL/dW = grad_output @ x^T
        grad_x = torch.mm(W.t(), grad_out)   # dL/dx = W^T @ grad_output
        return grad_W, grad_x



class model(nn.Module):
    """Network whose middle step is the hand-written my_autograd_function.

    The weight used by the custom function is registered as an
    ``nn.Parameter`` (``self.W``), so it is trained by the optimizer and can
    be inspected with ``print(net.W)`` — a ``torch.autograd.Function`` itself
    never owns parameters.
    """

    def __init__(self, model_parameters=None):
        # super().__init__() is mandatory before assigning any submodule
        # or Parameter on an nn.Module.
        super().__init__()
        # self.layer1 = ...
        # self.layer2 = ...
        # self.layer3 = ...
        # Register the custom function's weight so it appears in
        # net.parameters() and receives gradients.
        # NOTE(review): shape is a placeholder — derive it from
        # model_parameters to match layer2's output size.
        self.W = nn.Parameter(torch.randn(4, 4))

    def forward(self, x):
        x = self.layer1(x)
        x = self.layer2(x)
        # Call the Function via .apply (never instantiate it), passing the
        # weight first to match forward(ctx, W, x).
        x = my_autograd_function.apply(self.W, x)
        x = self.layer3(x)
        return x

if __name__ == '__main__':

# Build the network. NOTE(review): model.__init__ takes model_parameters,
# so model() with no arguments raises a TypeError as written — confirm the
# intended constructor signature.
net=model()

# nn.Module submodules assigned in __init__ expose their parameters as
# attributes, so these lookups succeed.
print(net.layer1.weight) # it works
print(net.layer2.weight) # it works
print(net.layer3.weight) # it works
# This fails because my_autograd_function is a torch.autograd.Function, not
# an nn.Module: it was never assigned as an attribute of the model (it is
# only instantiated transiently inside forward), and a Function holds no
# .weight — its tensors are just the arguments passed to it. To make the
# weight visible here, register it as an nn.Parameter on the model
# (e.g. self.W in __init__) and print net.W instead.
print(net.my_autograd_function.weight) # it doesn't work   "net.my_autograd_function" is not recognized