I have defined a custom loss function but the loss function is not decreasing, not even changing.

my loss function aims to minimize the inverse of gap statistic which is used to evaluate the cluster formed from my embeddings.

this is a toy code:

```
def get_data():
Xlist = []
for i in range(6):
X, _ = make_blobs(n_samples=1000, n_features=2)
Xlist.append(X)
dat = np.stack(np.array(Xlist), axis=2)
dat = torch.from_numpy(np.reshape(dat,(-1))).float()
return dat
def custom_loss_function(gap):
loss = torch.sum([1./g for g in gap])
return loss
def calculate_Wk(X):
Sum_of_squared_distances = []
for k in [10, 50, 150, 300, 500]:
optics = OPTICS(min_samples=k).fit(X)
Sum_of_squared_distances.append(sum(optics.core_distances_))
sum_of_squared_dist = np.log(np.array(Sum_of_squared_distances))
normalized_wcd = sum_of_squared_dist - np.max(sum_of_squared_dist)
return normalized_wcd
def calculate_gap(output,x, Wks, s_prime):
list_of_G = []
Wks = torch.tensor(Wks)
output = torch.tensor(output)
x = np.reshape(x, (1000, 2, 6))
for i, weight in enumerate(output):
Wk = calculate_Wk(weight * x[:,:,i])
Wk = torch.tensor(Wk)
G = Wks - Wk
optimum = 0
for i in range(0, len(G)):
if(G[i] >= G[i+1] + s_prime[i+1]):
optimum = i+1
break
if optimum == 0:
optimum = 1
list_of_G.append(G[optimum])
return list_of_G
def get_reference_data():
e = np.finfo(float).eps
simulated_Wk = np.zeros((20, 5)) + e
simulated_sk = np.zeros((20, 5)) + e
for i in range(20):
temp_wk = []
temp_sk = []
X = np.random.uniform(0, 1, size=(1000, 2))
simulated_Wk[i] = calculate_Wk(X)
Wks = np.mean(simulated_Wk + e, axis=0)
sks = np.std(simulated_Wk + e, axis=0) * np.sqrt(1 + 1/20)
return Wks, sks
Wks, s_prime = get_reference_data()
in_features = 12000
out_features = 6
model = nn.Sequential(nn.Linear(in_features, out_features))
optimizer = torch.optim.SGD(model.parameters(), lr=0.001)
x = get_data()
for n in range(10):
output = model(x)
#This is the part where i calculate the gap statistic that i want to minimize.
list_of_gaps = calculate_gap(output, x, Wks, s_prime)
loss = custom_loss_func(list_of_gaps)
loss = Variable(loss, requires_grad = True)
loss.backward()
optimizer.step()
```

The loss is not even changing, my model isnt learning anything. Can you help me out with this?