# How to compute gradient for conditionals?

I want to create a mask from a score threshold and multiply it with the weight matrix.
To create the mask I'm using the condition shown in the code below, but the gradients for all score parameters come out as zero.
How can I compute gradients for the score parameters?

```python
class SuperMaskMLP(torch.nn.Module):

def __init__(self,num_features,hidden_nodes_list,num_classes):

num_hidden_layes = len(hidden_nodes_list)
self.hidden = torch.nn.ModuleList()
self.score = torch.nn.ModuleList()

if num_hidden_layes==0:
self.hidden.append(torch.nn.Linear(num_features, num_classes))
self.score.append(torch.nn.Linear(num_features, num_classes))
else:
self.hidden.append(torch.nn.Linear(num_features, hidden_nodes_list[0]))
self.score.append(torch.nn.Linear(num_features, hidden_nodes_list[0]))
for k in range(num_hidden_layes-1):
self.hidden.append(torch.nn.Linear(hidden_nodes_list[k], hidden_nodes_list[k+1]))
self.score.append(torch.nn.Linear(hidden_nodes_list[k], hidden_nodes_list[k+1]))
self.hidden.append(torch.nn.Linear(hidden_nodes_list[num_hidden_layes-1], num_classes))
self.score.append(torch.nn.Linear(hidden_nodes_list[num_hidden_layes-1], num_classes))

# input : features
# output: logits , probabilities
def forward(self, x):
out = x
i=0
for layer in self.hidden[:-1]:
out = torch.matmul(out,torch.t(w)) + (b)
out = F.relu(out)
i+=1