Backward with Autograd

Hi,

I am looking for an example of how to do a complete backward pass with torch.autograd.grad instead of loss.backward() for a network like this:

import torch
import torch.nn as nn

# placeholder sizes so the snippet runs; in my real code these come from a config
D_in, H, D_out, num_hidden = 10, 32, 1, 4

class DeepNN(nn.Module):
    def __init__(self):
        super().__init__()
        self.fc_1 = nn.Linear(D_in, H)
        self.fc_end = nn.Linear(H, D_out)

        # input layer, num_hidden hidden layers, then the output layer
        self.module_list = [self.fc_1]
        for _ in range(num_hidden):
            self.module_list.append(nn.Linear(H, H))
        self.module_list.append(self.fc_end)

        self.f = nn.Sequential(*self.module_list)
        self._name = "DeepNN"

    def forward(self, x):
        return self.f(x)
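
Here is a rough sketch of what I have in mind, using a made-up MSE loss and dummy data just as placeholders; I am not sure whether assigning p.grad manually at the end is the right way to do it:

model = DeepNN()
x = torch.randn(8, D_in)
target = torch.randn(8, D_out)

loss = nn.functional.mse_loss(model(x), target)

# instead of loss.backward(): compute the gradient of the loss
# with respect to every parameter explicitly
params = list(model.parameters())
grads = torch.autograd.grad(loss, params)

# store the gradients on the parameters so optimizer.step() would still work
for p, g in zip(params, grads):
    p.grad = g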

Can anyone help me with that?
Thanks.