Custom loss function using gradients of intermediate layers

Hello. I am looking to create a complicated custom loss function that uses both the output of the neural net and the intermediate gradients from specific layers.
Here is the net:

import torch
import torch.nn as nn


class UGVNet(nn.Module):
    def __init__(self):
        super(UGVNet, self).__init__()
        self.Nt = 30
        self.Nu1 = nn.Linear(self.Nt, 64)
        self.Nu2 = nn.Linear(64, 2 * self.Nt)
        self.Nx1 = nn.Linear(2 * self.Nt, 64)
        self.Nx2 = nn.Linear(64, 64)
        self.Nx3 = nn.Linear(64, 6 * self.Nt)

    def swish(self, x):
        # Swish activation: x * sigmoid(x)
        return x / (1.0 + torch.exp(-x))

    def forward(self, x):
        x = self.swish(self.Nu1(x))
        x = self.swish(self.Nu2(x))
        x = self.swish(self.Nx1(x))
        x = self.swish(self.Nx2(x))
        x = self.swish(self.Nx3(x))
        return x
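
One idea I had was to change forward so it also returns the intermediate activations I care about. This is only a sketch of what I mean (the choice of h2 and h4 as the interesting layers is just an example), and I am not sure it is the right pattern:

    def forward(self, x):
        h1 = self.swish(self.Nu1(x))
        h2 = self.swish(self.Nu2(h1))
        h3 = self.swish(self.Nx1(h2))
        h4 = self.swish(self.Nx2(h3))
        out = self.swish(self.Nx3(h4))
        # Return the intermediates alongside the output so the loss can use them.
        return out, h2, h4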



Does anyone have pointers or examples on how to do this? I need explicit access to the intermediate gradients from some of the layers.
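
Building on the modified forward above, this is roughly the direction I was considering, using torch.autograd.grad with create_graph=True so the intermediate gradient stays differentiable and can appear in the loss. The 0.1 weight and the loss terms are just placeholders I made up, so I am not sure this is the right way to wire it up:

    model = UGVNet()
    x = torch.randn(8, model.Nt, requires_grad=True)  # dummy batch of inputs

    out, h2, h4 = model(x)

    # Gradient of (a scalar function of) the output w.r.t. the intermediate h2.
    # create_graph=True keeps this gradient in the graph so it can enter the loss.
    grad_h2 = torch.autograd.grad(out.sum(), h2, create_graph=True)[0]

    # Placeholder loss mixing the network output with the intermediate gradient.
    loss = out.pow(2).mean() + 0.1 * grad_h2.pow(2).mean()
    loss.backward()

Is this a reasonable approach, or is there a better way (hooks, retain_grad, etc.) to get at these gradients?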

Thank you.