Using a custom layer with non-trainable parameters as the last layer in a network

Hi there,
I have created a custom layer with non-trainable parameters that computes the (log-)probability of each instance, like this:

import numpy as np
import torch
import torch.nn as nn


class MyLastLayer(nn.Module):
    def __init__(self, num_classes):
        super(MyLastLayer, self).__init__()
        self.num_classes = num_classes

    def forward(self, fx, m, precision, c, count):
        # start with a -inf log-posterior for every (instance, class) pair
        logPost = torch.from_numpy(-np.ones((fx.shape[0], self.num_classes)) * np.inf)
        for t in range(fx.shape[0]):
            for i in range(self.num_classes):
                if count[i] != 0:
                    # quadratic term: (fx[t] - m[i])^T precision[i] (fx[t] - m[i])
                    a = fx[t] - torch.from_numpy(m[i])
                    a = torch.unsqueeze(a, dim=1)
                    aT = a.t()
                    preTensor = torch.from_numpy(precision[i])
                    logPost[t, i] = c[i][0] - torch.mm(torch.mm(aT, preTensor), a) / 2 + np.log(count[i])[0]
        # return the predicted class index for each instance
        return torch.argmax(logPost, dim=1)
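
To make the intent clearer, each entry of logPost is meant to be the (unnormalised) Gaussian log-posterior of instance t under class i, built from the per-class mean, precision matrix, constant term and count, i.e. roughly:

logPost[t, i] = c[i] - (1/2) * (fx[t] - m[i])^T * precision[i] * (fx[t] - m[i]) + log(count[i])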

Then I use the custom layer in my network as shown below:

class Classifier(nn.Module):
    def __init__(self, batch_size, ncls, out_dim):
        super(Classifier, self).__init__()
        # non-trainable per-class statistics, kept as NumPy arrays
        self.m = np.zeros((ncls, out_dim))                     # class means
        self.c = np.zeros((ncls, 1))                           # constant terms
        self.precision = np.zeros((ncls, out_dim, out_dim))    # precision matrices
        self.count = np.ones((ncls, 1))                        # per-class counts

        # feature extractor (img_shape is defined elsewhere in my script)
        self.model = nn.Sequential(
            nn.Linear(int(np.prod(img_shape)), 512),
            nn.LeakyReLU(0.2, inplace=True),
            nn.Linear(512, 256),
            nn.LeakyReLU(0.2, inplace=True),
            nn.Linear(256, 128),
            nn.LeakyReLU(0.2, inplace=True),
            nn.Linear(128, 64)
        )

        self.classifier = MyLastLayer(ncls)

    def forward(self, img, label):
        img_flat = img.view(img.size(0), -1)
        feas = self.model(img_flat)
        out = self.classifier(feas, self.m, self.precision, self.c, self.count)
        return out

When I run the code, I get this error:

RuntimeError: element 0 of tensors does not require grad and does not have a grad_fn
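
For context, I call the model from a fairly standard training loop, roughly like the sketch below (the optimizer, loss, and dataloader here are just placeholders, not my exact code); the error is raised at loss.backward():

model = Classifier(batch_size=64, ncls=10, out_dim=64)    # example values
optimizer = torch.optim.Adam(model.parameters(), lr=1e-3)

for imgs, labels in dataloader:                   # my usual image loader
    optimizer.zero_grad()
    out = model(imgs, labels)                     # out is the argmax index per instance
    loss = (out.float() - labels.float()).pow(2).mean()   # placeholder loss
    loss.backward()                               # <-- RuntimeError happens here
    optimizer.step()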

My question is: how can my network backpropagate from the last layer all the way back to the first layer?

Any help would be appreciated!