I am not sure what accidental change in my code caused this error. Could you please guide me on how to fix it?
The code is:
if train:
    print('training...')
    torch.autograd.set_detect_anomaly(True)
    for i_batch, sample_batched in enumerate(dataloader_train):
        feats = torch.stack(sample_batched['image'])
        labels = torch.as_tensor(sample_batched['label']).cuda()
        print('feats shape: ', feats.shape)
        print('labels shape: ', labels.shape)
        # this is line 289 of main_classifier.py, where the traceback below originates
        pred, labels, loss = model.forward(feats, labels)
        output = model(feats)
        loss = criterion(output, labels)
        optimizer.zero_grad()
        loss.backward()
        optimizer.step()
        acc = (output.argmax(dim=1) == labels).float().mean()
        train_preds = output.argmax(dim=1)
Running it prints the shapes and then raises the error:

feats shape: torch.Size([64, 419, 512])
labels shape: torch.Size([64])
Traceback (most recent call last):
  File "main_classifier.py", line 289, in <module>
    pred, labels, loss = model.forward(feats, labels)
  File "/home/jalal/research/venv/dpcc/lib/python3.8/site-packages/torch/nn/parallel/data_parallel.py", line 166, in forward
    return self.module(*inputs[0], **kwargs[0])
  File "/home/jalal/research/venv/dpcc/lib/python3.8/site-packages/torch/nn/modules/module.py", line 1102, in _call_impl
    return forward_call(*input, **kwargs)
TypeError: forward() takes 2 positional arguments but 3 were given
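
From the traceback, the model is wrapped in torch.nn.DataParallel, whose forward simply unpacks the inputs and calls the underlying module's forward. The TypeError therefore suggests that the module's forward is defined as forward(self, x), taking only the features, while model.forward(feats, labels) passes two tensors. Assuming that signature (the model definition is not shown here, so this is an assumption), a minimal sketch of the loop body without the two-argument call, keeping the existing criterion-based loss, would be:

        # Hypothetical sketch: assumes the wrapped module is defined with
        # forward(self, x), so nn.DataParallel forwarding (feats, labels)
        # to it is exactly one positional argument too many.
        output = model(feats)               # call with the features only
        loss = criterion(output, labels)    # compute the loss outside the model
        optimizer.zero_grad()
        loss.backward()
        optimizer.step()
        acc = (output.argmax(dim=1) == labels).float().mean()
        train_preds = output.argmax(dim=1)

Conversely, if the module is actually meant to compute its own loss and return pred, labels, loss, then the fix goes the other way: change its signature to forward(self, x, labels) so the two-argument call matches.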