AttributeError: 'int' object has no attribute 'to'

I am training a CNN for multi-label classification and get this error while testing an image.

```python
import random

import numpy as np
import torch
import torch.nn as nn
import torch.optim as optim

# CNN, dataloader_train and dataloader_test are defined elsewhere in the notebook.

def runmodel(seed):
    # Seed every RNG so runs are reproducible
    torch.manual_seed(seed)
    random.seed(seed)
    np.random.seed(seed)
    torch.use_deterministic_algorithms(True)

    device = torch.device('cpu' if not torch.cuda.is_available() else 'cuda')
    model = CNN().to(device)
    criterion = nn.CrossEntropyLoss().to(device)
    optimizer = optim.Adam(model.parameters(), lr=0.001)  # , betas=(0.9, 0.999))

    for epoch in range(1, 5):
        loss_train, acc_train = train(model, device, dataloader_train, optimizer, criterion, epoch)
        print('Epoch {} Train: Loss: {:.4f}, Accuracy: {:.3f}%\n'.format(
            epoch, loss_train, 100. * acc_train))
        loss_test, acc_test = test(model, device, dataloader_test, criterion)
        print('Epoch {} Test : Loss: {:.4f}, Accuracy: {:.3f}%\n'.format(
            epoch, loss_test, 100. * acc_test))
    return acc_test

def train(model, device, data_loader, optimizer, criterion, epoch):
    model.train()
    loss_train = 0
    num_correct = 0
    for batch_idx, (data, target) in enumerate(data_loader):
        data, target = data.to(device), target.to(device)
        optimizer.zero_grad()
        output = model(data)
        loss = criterion(output, target)
        loss.backward()
        optimizer.step()
        loss_train += loss.item()
        prediction = output.argmax(dim=1)
        num_correct += prediction.eq(target).sum().item()
        if batch_idx % 50 == 0:
            print('Train Epoch: {} [{}/{} ({:.0f}%)]\tLoss: {:.4f}\tAccuracy: {:.0f}%'.format(
                epoch, batch_idx * len(data), len(data_loader.dataset),
                100. * batch_idx / len(data_loader), loss_train / (batch_idx + 1),
                100. * num_correct / (len(data) * (batch_idx + 1))))
    loss_train /= len(data_loader)
    accuracy = num_correct / len(data_loader.dataset)
    return loss_train, accuracy

def test(model, device, data_loader, criterion):
    model.eval()
    loss_test = 0
    num_correct = 0
    with torch.no_grad():  # no gradients needed for evaluation
        for data, target in data_loader:
            data, target = data.to(device), target.to(device)
            output = model(data)
            loss = criterion(output, target)
            loss_test += loss.item()  # sum up batch loss
            prediction = output.argmax(dim=1)
            num_correct += prediction.eq(target).sum().item()
    loss_test /= len(data_loader)
    accuracy = num_correct / len(data_loader.dataset)
    return loss_test, accuracy
```

I am getting the error above. I am running the code in Google Colab.

From a quick skim, I can't spot anything wrong here. `criterion = nn.CrossEntropyLoss()` is correct; you just need to instantiate `CrossEntropyLoss` before calling it.
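For reference, here is a minimal sketch of what instantiating and calling the loss looks like (the tensor shapes and values below are illustrative, not taken from the question):

```python
import torch
import torch.nn as nn

criterion = nn.CrossEntropyLoss()    # correct: instantiate the module once...
output = torch.randn(4, 10)          # (N, C) raw logits from the model
target = torch.tensor([1, 0, 9, 3])  # (N,) class indices as a LongTensor
loss = criterion(output, target)     # ...then call it like a function
```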

In the meantime, please post a minimal executable snippet enclosed within ``` and point out the exact line that raises the error.
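That said, `AttributeError: 'int' object has no attribute 'to'` at `target.to(device)` usually means `target` is a plain Python `int` rather than a tensor. This typically happens when a single sample is pulled straight out of the dataset (e.g. `data, target = dataset_test[0]`) instead of coming through a `DataLoader`, because the loader's default collation is what converts integer labels into tensors. A minimal sketch of the failure and two possible fixes; the image shape and label below are made up for illustration:

```python
import torch

# A single sample indexed straight from a dataset often looks like this:
# an image tensor plus a plain Python int label (torchvision's MNIST,
# for example, returns exactly that). Both values here are made up.
data = torch.randn(1, 28, 28)  # one image, no batch dimension
target = 3                     # a plain int, NOT a tensor

device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')

# This line reproduces the reported error, because ints have no .to():
# target = target.to(device)  # AttributeError: 'int' object has no attribute 'to'

# Fix 1: tensorize the label and add a batch dimension to the image.
data = data.unsqueeze(0).to(device)             # shape (1, 1, 28, 28)
target = torch.tensor([target], device=device)  # shape (1,), dtype int64

# Fix 2: go through a DataLoader, whose default collation turns int
# labels into a LongTensor, so target.to(device) works as in train()/test():
# loader = torch.utils.data.DataLoader(dataset_test, batch_size=1)
# for data, target in loader:
#     data, target = data.to(device), target.to(device)
```

If that is the cause, routing the single test image through a `DataLoader` with `batch_size=1` keeps your test path identical to your training path.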