ValueError: outputs must be a Tensor or an iterable of Tensors

i was trying a multi label classification problem using mixed precision,if i use nn.BCEWithLogitsLoss() then it works fine but when i try to use below custom loss :

def _check_input_type(x, y, loss):
    if loss in ['MSELoss', 'MAELoss', 'huber']:
        if x.shape[-1] == 1:
            return x.squeeze(), y.float()
            return x, y.float()
    elif loss in ['CrossEntropy']:
        return x, y.long()
        return x, y
def soft_cross_entropy_loss(logits, targets, weights=1, reduction='none'):
    if len(targets.shape) == 1 or targets.shape[1] == 1:
        onehot_targets = torch.eye(logits.shape[1])[targets].to(logits.device)
        onehot_targets = targets
    loss = -torch.sum(onehot_targets * F.log_softmax(logits, 1), 1)
    if reduction == 'none':
        return loss
    elif reduction == 'sum':
        return loss.sum()
    elif reduction == 'mean':
        return loss.mean()
def ousm(logits, targets, indices=None):
    logits, targets = _check_input_type(logits, targets, 'CrossEntropy')
    bs = logits.shape[0]
    k = 5
    if bs - k > 0:
      losses = soft_cross_entropy_loss(logits, targets)
      if len(losses.shape) == 2:
          losses = losses.mean(1)
      _, idxs = losses.topk(bs-k, largest=False)
      losses = losses.index_select(0, idxs)

      return losses.mean()
it gives the following error: 

0%|          | 0/12040 [00:06<?, ?it/s]
Traceback (most recent call last):

  File "C:\Users\Mobassir\", line 660, in <module>
    loss_train = train_func(train_loader,scheduler = scheduler)

  File "C:\Users\Mobassir\", line 288, in train_func

  File "C:\Users\Mobassir\anaconda3\envs\kaggle\lib\site-packages\torch\cuda\amp\", line 188, in scale
    return apply_scale(outputs)

  File "C:\Users\Mobassir\anaconda3\envs\kaggle\lib\site-packages\torch\cuda\amp\", line 186, in apply_scale
    raise ValueError("outputs must be a Tensor or an iterable of Tensors")

ValueError: outputs must be a Tensor or an iterable of Tensors


1 Like