Beginner RuntimeError: shape '[-1, 784]' is invalid for input of size 12288

Hello, I am new to PyTorch.
I am trying to learn by building a fully connected neural network on the CIFAR-10 dataset.
However, I am getting the error below, and I am confused because so many people seem to have run this kind of code successfully.

import torch
import torchvision
from torch.autograd import Variable
import torch.nn as nn
import torch.nn.functional as F
import torch.optim as optim
from torchvision import datasets, transforms

batch_size=4
learning_rate=0.01
epochs=10
log_interval=10

transform = transforms.Compose([transforms.ToTensor(), transforms.Normalize((0.5, 0.5, 0.5), (0.5, 0.5, 0.5))])

trainset = torchvision.datasets.CIFAR10(root='./data', train=True, download=True, transform=transform)

train_loader = torch.utils.data.DataLoader(trainset, batch_size=batch_size, shuffle=True, num_workers=2)

testset = torchvision.datasets.CIFAR10(root='./data', train=False, download=True, transform=transform)

test_loader = torch.utils.data.DataLoader(testset, batch_size=batch_size, shuffle=False, num_workers=2)

classes = ('plane', 'car', 'bird', 'cat', 'deer', 'dog', 'frog', 'horse', 'ship', 'truck')

class Net(nn.Module):
    def __init__(self):
        super(Net, self).__init__()
        self.fc1 = nn.Linear(784, 200)
        self.fc2 = nn.Linear(200, 200)
        self.fc3 = nn.Linear(200, 10)

    def forward(self, x):
        x = F.relu(self.fc1(x))
        x = F.relu(self.fc2(x))
        x = self.fc3(x)
        return F.log_softmax(x)

net = Net()
optimizer = optim.SGD(net.parameters(), lr=learning_rate)
Loss = nn.CrossEntropyLoss()

for epoch in range(epochs):
    for batch_idx, (data, target) in enumerate(train_loader):
        data, target = Variable(data), Variable(target)
        # resize data from (batch_size, 1, 28, 28) to (batch_size, 28*28)
        data = data.view(-1, 28*28)
        optimizer.zero_grad()
        logits = net(data)
        loss = Loss(logits, target)
        loss.backward()
        optimizer.step()
        if batch_idx % log_interval == 0:
            print('Train Epoch: {} [{}/{} ({:.0f}%)]\tLoss: {:.6f}'.format(
                epoch, batch_idx * len(data), len(train_loader.dataset),
                100. * batch_idx / len(train_loader), loss.data[0]))


RuntimeError                              Traceback (most recent call last)
in
      3         data, target = Variable(data), Variable(target)
      4         # resize data from (batch_size, 1, 28, 28) to (batch_size, 28*28)
----> 5         data = data.view(-1, 28*28)
      6         optimizer.zero_grad()
      7         logits = net(data)

RuntimeError: shape '[-1, 784]' is invalid for input of size 12288

I have tried changing the batch_size, but it didn't work.

CIFAR10 returns images of shape [3, 32, 32], i.e. 3*32*32 = 3072 values per image, so your .view(-1, 28*28) call cannot work: the 12288 in the error message is batch_size * 3 * 32 * 32 = 4 * 3072, which is not divisible into rows of 784.
Instead use .view(-1, 3*32*32) and set the same value (3072) as the number of input features of your first linear layer.
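
To make that concrete, here is a minimal sketch of only the lines that change, assuming the rest of your script stays as it is:

class Net(nn.Module):
    def __init__(self):
        super(Net, self).__init__()
        # CIFAR-10 images are 3 x 32 x 32, so the flattened input has 3*32*32 = 3072 features
        self.fc1 = nn.Linear(3*32*32, 200)
        self.fc2 = nn.Linear(200, 200)
        self.fc3 = nn.Linear(200, 10)

and, inside the training loop:

        # flatten [batch_size, 3, 32, 32] to [batch_size, 3072]
        data = data.view(-1, 3*32*32)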

Also, Variables are deprecated, so you can use tensors directly.
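
For example, a sketch of your training loop without Variable (and with loss.item() instead of the likewise deprecated loss.data[0]):

for epoch in range(epochs):
    for batch_idx, (data, target) in enumerate(train_loader):
        # the DataLoader already yields tensors, so no Variable wrapping is needed
        data = data.view(-1, 3*32*32)
        optimizer.zero_grad()
        logits = net(data)
        loss = Loss(logits, target)
        loss.backward()
        optimizer.step()
        if batch_idx % log_interval == 0:
            # loss.item() returns the Python float stored in the 0-dim loss tensor
            print('Train Epoch: {} [{}/{} ({:.0f}%)]\tLoss: {:.6f}'.format(
                epoch, batch_idx * len(data), len(train_loader.dataset),
                100. * batch_idx / len(train_loader), loss.item()))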