CrossEntropy in Pytorch getting Target 1 out of bounds

Why does CrossEntropy throw this error. Im new to pytorch and deeplearning in general so not sure what is really going on here.

import torch
import torch.nn as nn
import torch.nn.functional as F
from torch.utils.data import TensorDataset, DataLoader
import torch.optim as optim
import pandas as pd
import numpy as np

df = pd.read_csv('housepricedata.csv')

dataset = df.values
X = dataset[:,0:10]
y = dataset[:, 10]
len(X[0])

from sklearn import preprocessing
min_max = preprocessing.MinMaxScaler()
x_scale = min_max.fit_transform(X)

from sklearn.model_selection import train_test_split
X_train, X_test, y_train, y_test = train_test_split(x_scale, y, test_size=0.3)

X_train = torch.FloatTensor(X_train)
X_test = torch.FloatTensor(X_test)
y_train = torch.LongTensor(y_train)
y_test = torch.LongTensor(y_test)

trainD = TensorDataset(X_train, y_train)
testD = TensorDataset(X_test, y_test)

class Model(nn.Module):
    def __init__(self, inp1=10, out=1):
        super().__init__()
        self.Dense1 = nn.Linear(inp1, 32)
        self.Dense2 = nn.Linear(32, 32)
        self.out = nn.Linear(32, out)

    def forward(self, x):
        x = F.relu(self.Dense1(x))
        x = F.relu(self.Dense2(x))
        x = self.out(x)
        return x

model = Model()

trainloader = DataLoader(trainD, batch_size=28, shuffle=False)
criterion = nn.CrossEntropyLoss()
optimizer = torch.optim.Adam(model.parameters(), lr=0.001)
epoch = 500
losses = []
for i in range(epoch):
    for data in trainloader:
        X, y = data
        optimizer.zero_grad()
        output = model(X)
        loss = criterion(output, y)
        losses.append(loss)
        loss.backward()
        optimizer.step()

This is the error it throws:

IndexError                                Traceback (most recent call last)
 in 
      6         optimizer.zero_grad()
      7         output = model(X)
----> 8         loss = criterion(output, y)
      9         losses.append(loss)
     10         loss.backward()

/usr/local/Cellar/python3/3.6.4_2/Frameworks/Python.framework/Versions/3.6/lib/python3.6/site-packages/torch/nn/modules/module.py in __call__(self, *input, **kwargs)
    530             result = self._slow_forward(*input, **kwargs)
    531         else:
--> 532             result = self.forward(*input, **kwargs)
    533         for hook in self._forward_hooks.values():
    534             hook_result = hook(self, input, result)

/usr/local/Cellar/python3/3.6.4_2/Frameworks/Python.framework/Versions/3.6/lib/python3.6/site-packages/torch/nn/modules/loss.py in forward(self, input, target)
    914     def forward(self, input, target):
    915         return F.cross_entropy(input, target, weight=self.weight,
--> 916                                ignore_index=self.ignore_index, reduction=self.reduction)
    917 
    918 

/usr/local/Cellar/python3/3.6.4_2/Frameworks/Python.framework/Versions/3.6/lib/python3.6/site-packages/torch/nn/functional.py in cross_entropy(input, target, weight, size_average, ignore_index, reduce, reduction)
   2019     if size_average is not None or reduce is not None:
   2020         reduction = _Reduction.legacy_get_string(size_average, reduce)
-> 2021     return nll_loss(log_softmax(input, 1), target, weight, None, ignore_index, None, reduction)
   2022 
   2023 

/usr/local/Cellar/python3/3.6.4_2/Frameworks/Python.framework/Versions/3.6/lib/python3.6/site-packages/torch/nn/functional.py in nll_loss(input, target, weight, size_average, ignore_index, reduce, reduction)
   1836                          .format(input.size(0), target.size(0)))
   1837     if dim == 2:
-> 1838         ret = torch._C._nn.nll_loss(input, target, weight, _Reduction.get_enum(reduction), ignore_index)
   1839     elif dim == 4:
   1840         ret = torch._C._nn.nll_loss2d(input, target, weight, _Reduction.get_enum(reduction), ignore_index)

IndexError: Target 1 is out of bounds.
[-]


[36]


epochs = 500
losses = []

for i in range(epochs):
    i =+1
    y_pred = model.forward(X_train)
    for data in data
    loss = criterion(y_pred, y_train)
    losses.append(loss)


---------------------------------------------------------------------------
IndexError                                Traceback (most recent call last)
 in 
      5     i =+1
      6     y_pred = model.forward(X_train)
----> 7     loss = criterion(y_pred, y_train)
      8     losses.append(loss)
      9 

/usr/local/Cellar/python3/3.6.4_2/Frameworks/Python.framework/Versions/3.6/lib/python3.6/site-packages/torch/nn/modules/module.py in __call__(self, *input, **kwargs)
    530             result = self._slow_forward(*input, **kwargs)
    531         else:
--> 532             result = self.forward(*input, **kwargs)
    533         for hook in self._forward_hooks.values():
    534             hook_result = hook(self, input, result)

/usr/local/Cellar/python3/3.6.4_2/Frameworks/Python.framework/Versions/3.6/lib/python3.6/site-packages/torch/nn/modules/loss.py in forward(self, input, target)
    914     def forward(self, input, target):
    915         return F.cross_entropy(input, target, weight=self.weight,
--> 916                                ignore_index=self.ignore_index, reduction=self.reduction)
    917 
    918 

/usr/local/Cellar/python3/3.6.4_2/Frameworks/Python.framework/Versions/3.6/lib/python3.6/site-packages/torch/nn/functional.py in cross_entropy(input, target, weight, size_average, ignore_index, reduce, reduction)
   2019     if size_average is not None or reduce is not None:
   2020         reduction = _Reduction.legacy_get_string(size_average, reduce)
-> 2021     return nll_loss(log_softmax(input, 1), target, weight, None, ignore_index, None, reduction)
   2022 
   2023 

/usr/local/Cellar/python3/3.6.4_2/Frameworks/Python.framework/Versions/3.6/lib/python3.6/site-packages/torch/nn/functional.py in nll_loss(input, target, weight, size_average, ignore_index, reduce, reduction)
   1836                          .format(input.size(0), target.size(0)))
   1837     if dim == 2:
-> 1838         ret = torch._C._nn.nll_loss(input, target, weight, _Reduction.get_enum(reduction), ignore_index)
   1839     elif dim == 4:
   1840         ret = torch._C._nn.nll_loss2d(input, target, weight, _Reduction.get_enum(reduction), ignore_index)

IndexError: Target 1 is out of bounds.

nn.CrossEntropyLoss is used for a multi-class classification, while your model outputs the logits for a single class.
If you are dealing with a binary classification, you could use nn.BCEWithLogitsLoss, or output two logits and keep nn.CrossEntropyLoss.

3 Likes

Thank you, Yep forgot to update here, I changed it to have two outputs. Sorry for not updating it here.
Also thank you for pointing me to BCEWithLogitsLoss will give it a try. Thank you once again