I got a strange ValueError from the DataLoader

Hello, I got a strange error:

<ipython-input-154-3032f9891941> in loss_epoch(model, loss_func, dataset_dl, sanity_check, opt)
      5     len_data=len(dataset_dl.dataset)
      6 
----> 7     for xb,clb,yb in dataset_dl:
      8         # convert to tensor
      9         yb=torch.stack(yb,1)

ValueError: too many values to unpack (expected 3)

It worked 10 minutes ago, but now it doesn't; I'm just stuck there.
Yes, it is a classification and regression task, so I need 3 outputs from the DataLoader.
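Just to illustrate what I expect each batch to contain (a rough sketch of my mental model, not my actual training loop; the shape comments are assumptions):

    # Each batch should unpack into exactly three values:
    for xb, clb, yb in dataset_dl:
        # xb:  batch of images
        # clb: batch of class ids (for the classification output)
        # yb:  batch of box targets (xmin, xmax, ymin, ymax) for the regression output
        break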
My loss function:

def loss_epoch(model,loss_func,dataset_dl,sanity_check=False,opt=None):
    running_loss=0.0
    running_metric=0.0
    accuracy=0.0
    len_data=len(dataset_dl.dataset)
    for xb,clb,yb in dataset_dl:
        # convert to tensor
        yb=torch.stack(yb,1)
        yb=yb.type(torch.float32).to(device)
        
        # get model output
        output1,output2=model(xb.to(device))
        
        # get loss per batch
        loss_b,metric_b=loss_batch(loss_func, output2, yb, opt)

        loss2= nn.CrossEntropyLoss()
        clb=clb.cuda()
        acc=(torch.argmax(output1, dim=1)==clb).sum()/float(len(output1))
        #print("OUTPUT1",output1)
        #print("clb",clb)
        #print("OUTPUT2",output2)
        loss2_out = loss2(output1, clb)
        accuracy+=acc
        # update running loss
        running_loss+=loss_b
        # update running metric
        if metric_b is not None:
            running_metric+=metric_b

    # average loss value
    loss=running_loss/float(len_data)+ loss2_out/float(len_data)
    accuracy=accuracy/float(len_data)
    # average metric value
    metric=running_metric/float(len_data)
    
    return loss, metric,accuracy

My Dataset:

    class New_dataset(Dataset):
        def __init__(self, path2data, transform, trans_params):
            self.labels = labels_df[["xmin", "xmax", "ymin", "ymax"]].values
            self.imgName = labels_df["image_name"]
            self.ids = labels_df.index
            self.fullPath2img = labels_df["path_to_img"]
            self.transform = transform
            self.trans_params = trans_params
            self.classid = labels_df['class']

        def __len__(self):
            return len(self.labels)

        def __getitem__(self, idx):
            image = Image.open(self.fullPath2img[idx])
            label = self.labels[idx]
            idclass = self.classid[idx]

            image, label = self.transform(image, label, self.trans_params)

            return image, idclass, label

I need 3 outputs from each DataLoader batch to calculate the 2 losses. It is strange because it worked while I was defining and fixing the loss function.
Maybe there is another error?

Could you check if you have accidentally overridden dataset_dl with another object which doesn't return these three values?
Also check if the underlying dataset still returns the three values via a, b, c = dataset_dl.dataset[0].
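Something like this minimal check (a sketch, assuming the dataset_dl from your post) should narrow it down:

    # 1) The underlying Dataset should yield exactly three items per sample.
    sample = dataset_dl.dataset[0]
    print(type(sample), len(sample))   # expect a tuple of length 3
    a, b, c = sample                   # raises a ValueError if it isn't 3 items

    # 2) The DataLoader batches should also collate into three elements.
    batch = next(iter(dataset_dl))
    print(len(batch))                  # expect 3
    xb, clb, yb = batch                # same unpacking as in loss_epoch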