Hello, I'm trying to implement the custom dataset mentioned here into a model that can detect faces.
The problem: while training, the loss is nan, and I don't know what to do.
Code:
import torch
import torch.nn as nn
import torch.nn.functional as F
import torch.optim as optim
from torch.utils.data import DataLoader
from torchvision import transforms

class Net(nn.Module):
    def __init__(self):
        super(Net, self).__init__()
        self.conv1 = nn.Conv2d(3, 6, 3)
        self.conv2 = nn.Conv2d(6, 9, 3)
        self.fc1 = nn.Linear(9 * 171 * 171, 1500)
        self.fc2 = nn.Linear(1500, 544)
        self.fc3 = nn.Linear(544, 136)

    def forward(self, x):
        x = F.relu(self.conv1(x))
        x = F.relu(self.conv2(x))
        x = torch.flatten(x, 1)
        x = F.relu(self.fc1(x))
        x = F.relu(self.fc2(x))
        x = self.fc3(x)  # 136 outputs = 68 landmarks * 2 coordinates
        return x
transformer = transforms.Compose([
    transforms.ToTensor(),
    transforms.Normalize((0.5, 0.5, 0.5), (0.5, 0.5, 0.5)),
])
batchsize = 4
trainset = projectdata(csv_file='face_landmarks.csv', root_dir='faces', transform=transformer)  # my custom Dataset class
train_loader = DataLoader(trainset, batch_size=batchsize, shuffle=True, num_workers=0)

net = Net()
optimizer = optim.SGD(net.parameters(), lr=0.0001, momentum=0.9)
# manual cross-entropy: mean over the batch of -sum(target * log(input)) along dim 1
def cross_entropy(input, target):
    return torch.mean(-torch.sum(target * torch.log(input), 1))
for epoch in range(2):
    running_loss = 0.0
    for i, data in enumerate(train_loader, 0):
        inputs, labels = data
        optimizer.zero_grad()
        outputs = net(inputs)  # 136 predicted annotations per sample, x4 (batch size)
        outputs = outputs.reshape((batchsize, 68, 2))  # reshaped to match the labels shape
        loss = cross_entropy(outputs, labels)  # calculating the loss
        loss.backward()
        optimizer.step()
        running_loss += loss.item()
I've tried using leaky_relu instead, but with no success either. I also tried switching the optimizer to Adam, with the same result.
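For reference, those changes looked roughly like this (just a sketch of what I swapped in, keeping the same learning rate):

# inside Net, leaky_relu instead of relu:
def forward(self, x):
    x = F.leaky_relu(self.conv1(x))
    x = F.leaky_relu(self.conv2(x))
    x = torch.flatten(x, 1)
    x = F.leaky_relu(self.fc1(x))
    x = F.leaky_relu(self.fc2(x))
    x = self.fc3(x)
    return x

# and Adam instead of SGD (same learning rate as before):
optimizer = optim.Adam(net.parameters(), lr=0.0001)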
Thanks!