Hallo I got this message while training lstm model :

UserWarning: Using a target size (torch.Size([28, 7, 1, 1])) that is different to the input size (torch.Size([28, 7])). This will likely lead to incorrect results due to broadcasting. Please ensure they have the same size. I need to know how to solve this …

This is the training loop:

def train_model():

# Train the model

num_epochs = 10

for epoch in range(num_epochs):

for tra_batch in train_loader:

X_train, y_train = tra_batch

optimizer.zero_grad()

output = model(X_train)

loss = criterion(output, y_train.squeeze(-1))

loss.backward()

optimizer.step()

```
# Validate the model
model.eval()
with torch.no_grad():
val_losses = []
for val_batch in valid_loader:
X_val, y_val = val_batch
val_output = model(X_val)
val_loss = criterion(val_output, y_val.squeeze(-1))
val_losses.append(val_loss.item())
# Log training and validation loss with W&B (outside the validation loop)
wandb.log({"Training Loss": loss.item(), "Validation Loss": sum(val_losses) / len(val_losses)})
print(f'Epoch [{epoch}/{num_epochs}], Training Loss: {loss.item():.4f}, Validation Loss: {(sum(val_losses) / len(val_losses)):.4f}')
```