ValueError: not enough values to unpack (expected 2, got 1) with XLNetModel

I am trying to create a classification class for the pretrained model XLNet.

Below is my code:

```python
class check(torch.nn.Module):
    def __init__(self, num_labels=2):
        super(check, self).__init__()
        self.num_labels = num_labels
        #self.xlnet = XLNetModel(config)
        self.xlnet = XLNetModel.from_pretrained(PRE_TRAINED_MODEL_NAME)
        self.drop = nn.Dropout(p=0.3)
        self.classifier = torch.nn.Linear(768, num_labels)
        torch.nn.init.xavier_normal_(self.classifier.weight)

    def forward(self, input_ids=None, token_type_ids=None, attention_mask=None, labels=None):
        _, pooled_output = self.xlnet(input_ids=input_ids, attention_mask=attention_mask, token_type_ids=token_type_ids, labels=labels)
        print(pooled_output[0])
        output = self.drop(pooled_output)
        return self.classifier(output)

class Classification(Dataset):

    def __init__(self, texts, labels, tokenizer, max_len):
        self.texts = texts
        self.labels = labels
        self.tokenizer = tokenizer
        self.max_len = max_len

    def __len__(self):
        return len(self.texts)

    def __getitem__(self, item):
        text = str(self.texts[item])
        label = self.labels[item]

        encoding = self.tokenizer.encode_plus(
            text,
            add_special_tokens=True,
            max_length=self.max_len,
            return_token_type_ids=False,
            pad_to_max_length=False,
            return_attention_mask=True,
            return_tensors='pt',
        )

        input_ids = pad_sequences(encoding['input_ids'], maxlen=MAX_LEN, dtype='int64', truncating="post", padding="post")
        input_ids = torch.tensor(input_ids)

        attention_mask = pad_sequences(encoding['attention_mask'], maxlen=MAX_LEN, dtype='int64', truncating="post", padding="post")
        attention_mask = torch.tensor(attention_mask)

        return {
            'review_text': text,
            'input_ids': input_ids,
            'attention_mask': attention_mask,
            'labels': torch.tensor(label, dtype=torch.long)
        }

def train_epoch(
    model,
    data_loader,
    loss_fn,
    optimizer,
    device,
    scheduler,
    n_examples
):
    model = model.train()
    losses = []
    correct_predictions = 0
    for d in data_loader:
        input_ids = d["input_ids"].reshape(4, 512).to(device)
        print(type(input_ids))
        attention_mask = d["attention_mask"].to(device)
        print(attention_mask.shape)
        labels = d["labels"].to(device)
        outputs = model(input_ids=input_ids, token_type_ids=None, attention_mask=attention_mask, labels=labels)
        print(outputs.shape)

        _, preds = torch.max(outputs, dim=1)
        loss = loss_fn(outputs, labels)
        correct_predictions += torch.sum(preds == labels)
        losses.append(loss.item())
        loss.backward()
        nn.utils.clip_grad_norm_(model.parameters(), max_norm=1.0)
        optimizer.step()
        scheduler.step()
        optimizer.zero_grad()

    return correct_predictions.double() / n_examples, np.mean(losses)

model = check().to(device)

train_acc, train_loss = train_epoch(
    model,
    train_data_loader,
    loss_fn,
    optimizer,
    device,
    scheduler,
    len(df_train)
)
print(f'Train loss {train_loss} accuracy {train_acc}')
```

error:
```python
forward(self, input_ids, token_type_ids, attention_mask, labels)
      9         torch.nn.init.xavier_normal_(self.classifier.weight)
     10     def forward(self, input_ids=None, token_type_ids=None, attention_mask=None, labels=None):
---> 11         _, pooled_output = self.xlnet(input_ids=input_ids, attention_mask=attention_mask, token_type_ids=token_type_ids, labels=labels)
     12         print(pooled_output[0])
     13         output = self.drop(pooled_output)

ValueError: not enough values to unpack (expected 2, got 1)
```

During a multiple assignment, `ValueError: not enough values to unpack` is raised when the right-hand side yields fewer objects than there are variables on the left; the number of values returned does not match the number of assignment targets. (The same mismatch often shows up when using Python's `split` function.) To resolve it, make the number of targets equal to the number of values actually returned, removing any excess targets. Here the right-hand side produces only one value: unlike `BertModel`, `XLNetModel` has no pooled output, so its forward pass cannot be unpacked into `_, pooled_output` (and, in recent `transformers` releases, its `forward` does not accept a `labels` argument at all).
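The mismatch is easy to reproduce in isolation:

```python
pair = (1,)   # an iterable holding a single value
a, b = pair   # ValueError: not enough values to unpack (expected 2, got 1)
```

Below is a sketch of a `forward` that avoids the failing unpack. It assumes a recent `transformers` release, where `XLNetModel` returns the token-level hidden states as its first output and takes no `labels` parameter; the checkpoint name and the mean pooling are illustrative choices, not something the library prescribes:

```python
import torch
import torch.nn as nn
from transformers import XLNetModel

PRE_TRAINED_MODEL_NAME = 'xlnet-base-cased'  # assumption: the checkpoint used above

class check(torch.nn.Module):
    def __init__(self, num_labels=2):
        super(check, self).__init__()
        self.num_labels = num_labels
        self.xlnet = XLNetModel.from_pretrained(PRE_TRAINED_MODEL_NAME)
        self.drop = nn.Dropout(p=0.3)
        self.classifier = torch.nn.Linear(768, num_labels)
        torch.nn.init.xavier_normal_(self.classifier.weight)

    def forward(self, input_ids=None, token_type_ids=None, attention_mask=None, labels=None):
        # Index the output instead of unpacking it: XLNetModel has no pooler,
        # so the first element is the token-level hidden states.
        outputs = self.xlnet(input_ids=input_ids,
                             attention_mask=attention_mask,
                             token_type_ids=token_type_ids)   # no labels kwarg
        last_hidden_state = outputs[0]                  # (batch, seq_len, 768)
        pooled_output = last_hidden_state.mean(dim=1)   # (batch, 768)
        output = self.drop(pooled_output)
        return self.classifier(output)                  # logits: (batch, num_labels)
```

Since the model now returns plain logits of shape `(batch, num_labels)`, the rest of `train_epoch` lines up with it: `torch.max(outputs, dim=1)` yields the predicted classes, and `loss_fn(outputs, labels)` works with a criterion such as `nn.CrossEntropyLoss`.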