Question about "element 0 of tensors does not require grad and does not have a grad_fn"

Hi, I was unable to debug my code using the existing posts, so I am creating a new one. The dataloader, model, and training code are below.

Also, in what ways can a variable become detached from the computation graph?

Dataset

import os

import pandas as pd
import torch
from skimage import io
from torch.utils.data import Dataset


class cifar10(Dataset):
    def __init__(self, image_path, labels_file, transform=None):
        self.image_path = image_path
        self.labels = pd.read_csv(labels_file)
        self.transform = transform

    def __len__(self):
        return self.labels.shape[0]

    def __getitem__(self, idx):
        # read the image and flatten it to a (1, 3072) float tensor on the GPU
        image_name = self.labels['filenames'][idx]
        img = io.imread(os.path.join(self.image_path, image_name))
        img = torch.from_numpy(img).float().view(-1).unsqueeze(0).cuda()
        label = self.labels['labels'][idx]
        label = torch.Tensor([label]).long().cuda()

        sample = {'image': img, 'label': label}

        if self.transform:
            sample = self.transform(sample)

        return sample

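For context, a minimal sketch of how this dataset might be instantiated (the file paths are assumptions, not from the original post). Since __getitem__ already adds a batch dimension with unsqueeze(0), the dataset itself can serve as train_data in the training loop below; wrapping it in a DataLoader would add a second batch dimension unless that unsqueeze is removed.

# hypothetical paths; substitute the actual CIFAR-10 image folder and labels CSV
train_data = cifar10(image_path='./cifar10/train',
                     labels_file='./cifar10/trainLabels.csv')
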
Model

import torch.nn as nn
import torch.nn.functional as F


class cifarnet(nn.Module):
    def __init__(self, num_input, num_hidden, num_output):
        super(cifarnet, self).__init__()
        self.linear1 = nn.Linear(num_input, num_hidden)
        self.linear2 = nn.Linear(num_hidden, num_output)

    def forward(self, x):
        # two-layer MLP: linear -> ReLU -> linear
        h_relu = F.relu(self.linear1(x))
        y_pred = self.linear2(h_relu)
        return y_pred

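The training loop below uses net, criterion, and optimizer, which are not shown in the post. A minimal sketch of how they might be set up for 32x32x3 CIFAR-10 images flattened to 3072 features (the hidden size, learning rate, and optimizer choice are assumptions):

import torch.nn as nn
import torch.optim as optim

# 32*32*3 = 3072 input features, 10 CIFAR-10 classes; hidden size is a guess
net = cifarnet(num_input=3072, num_hidden=512, num_output=10).cuda()
criterion = nn.CrossEntropyLoss()
optimizer = optim.SGD(net.parameters(), lr=0.001, momentum=0.9)
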
Training code

for epoch in range(2):  # loop over the dataset multiple times

    running_loss = 0.0
    for i, data in enumerate(train_data):
        # get the inputs; data is a dict with 'image' and 'label' keys
        image, label = data['image'], data['label']

        # zero the parameter gradients
        optimizer.zero_grad()

        # forward + backward + optimize
        outputs = net(image)
        loss = criterion(image, label)

        loss.backward()
        optimizer.step()

        # print statistics
        running_loss += loss.item()
        if i % 50 == 0:    # print every 50 mini-batches
            print('[%d, %5d] loss: %.3f' %
                  (epoch + 1, i + 1, running_loss / 50))
            running_loss = 0.0

print('Finished Training')

You should pass outputs to the criterion instead of image :wink: Since image comes straight from the dataloader, it does not require grad and has no grad_fn, so a loss computed from it has no graph to backpropagate through, which is exactly what the error message is telling you.
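
For reference, a minimal sketch of the corrected step (reusing the net, criterion, optimizer, image, and label names from the post), followed by a few operations that commonly leave a tensor detached from the graph, since that was also asked:

import torch

# corrected forward/backward step: the loss must be built from outputs,
# which carries a grad_fn, not from the raw input image
outputs = net(image)
loss = criterion(outputs, label)
loss.backward()
optimizer.step()

# common ways a tensor ends up detached from the computation graph
x = torch.randn(3, requires_grad=True)
y = (x * 2).detach()          # explicit detach
z = x.data                    # .data bypasses autograd
v = x.sum().item()            # converting to a Python scalar drops the graph
a = x.detach().cpu().numpy()  # NumPy arrays carry no graph
with torch.no_grad():
    w = x * 2                 # ops under no_grad are not recorded
t = torch.tensor([1.0, 2.0])  # freshly created tensors default to requires_grad=False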