Hi, I want to know why the loss is not decreasing? Bascially it’s always the initial loss. Thanks in advance!
optimizer.zero_grad()
# forward
output = net(imgdata)
_, preds = torch.max(output.data,1)
for i in range(output.size()[1]):
loss = criterion(output[:,:,:,i],labels.type(torch.LongTensor)[:,:,i])
print('Loss per batch ', loss.data[0])
if phase == 'train':
loss.backward()
optimizer.step()
next = batch_indices
running_loss += loss.data[0]