I built a CNN model following the PyTorch 60-minute blitz tutorial:
import torch.nn as nn
import torch.nn.functional as F
class Net(nn.Module):
    """Small LeNet-style CNN: two conv+pool stages followed by three FC layers.

    Expects 3-channel 32x32 inputs (e.g. CIFAR-10-sized images); the final
    layer emits 80 raw class scores (logits), to be fed to CrossEntropyLoss.
    """

    def __init__(self):
        super(Net, self).__init__()
        # 3 input channels -> 6 feature maps, 5x5 kernels (32x32 -> 28x28)
        self.conv1 = nn.Conv2d(3, 6, 5)
        self.pool1 = nn.MaxPool2d(2, 2)   # 28x28 -> 14x14
        self.conv2 = nn.Conv2d(6, 16, 5)  # 14x14 -> 10x10
        self.pool2 = nn.MaxPool2d(2, 2)   # 10x10 -> 5x5
        # Flattened conv output: 16 maps * 5 * 5 pixels each
        self.fc1 = nn.Linear(16 * 5 * 5, 200)
        self.fc2 = nn.Linear(200, 120)
        self.fc3 = nn.Linear(120, 80)

    def forward(self, x):
        x = self.pool1(F.relu(self.conv1(x)))
        x = self.pool2(F.relu(self.conv2(x)))
        # Flatten per sample; keeping the batch dimension explicit via
        # x.size(0) is safer than view(-1, ...), which can silently produce
        # a wrong batch size if the spatial dimensions ever change.
        x = x.view(x.size(0), 16 * 5 * 5)
        x = F.relu(self.fc1(x))
        x = F.relu(self.fc2(x))
        return self.fc3(x)
# Instantiate the network and print its layer summary.
net=Net()
print(net)
Then I trained it:
import torch
import torch.nn as nn
import torch.optim as optim
from torch.autograd import Variable
# Multi-class loss: CrossEntropyLoss expects raw (un-softmaxed) scores
# and integer class-id targets.
criterion=nn.CrossEntropyLoss()
# Plain SGD with momentum over all of the network's parameters.
optimizer=optim.SGD(net.parameters(),lr=0.001,momentum=0.9)
for epoch in range(2):  # two full passes over the training data
    running_loss = 0.0
    for i, inputs in enumerate(imageTensor):
        # Each element of imageTensor is treated as a batch of 4 images
        # -- presumably how it was built; TODO confirm the batch size.
        inputs = inputs.float()

        # BUG FIX: enumerate() advances i by 1 per *batch*, but classIdList
        # is indexed per *image*.  The original slice classIdList[i:i+4]
        # therefore re-used overlapping, misaligned labels for every batch,
        # which is why the loss never went down.  Scale by the batch size.
        # torch.as_tensor also removes the dependency on the never-imported
        # `np`; CrossEntropyLoss wants int64 (long) class-id targets.
        labels = torch.as_tensor(classIdList[i * 4:(i + 1) * 4],
                                 dtype=torch.long)

        optimizer.zero_grad()       # clear gradients accumulated last step
        outputs = net(inputs)
        loss = criterion(outputs, labels)
        loss.backward()
        optimizer.step()

        # loss.item() replaces the deprecated loss.data[0] (Variable and
        # Tensor were merged in PyTorch 0.4, so wrapping in Variable is no
        # longer needed either).
        running_loss += loss.item()
        if i % 200 == 199:          # report the mean loss every 200 batches
            print('[%d,%5d] %.3f' % (epoch + 1, i + 1, running_loss / 200))
            running_loss = 0.0
print('Finished Training!')
But the loss didn’t go down as expected:
Then I tested the model on validation images:
from torch.autograd import Variable
correct = 0
total = 0
net.eval()  # disable train-time behaviour (dropout/batch-norm), if any
with torch.no_grad():  # evaluation only: skip autograd bookkeeping
    for i, image in enumerate(imageTensor):
        # Add the missing batch dimension: (C, H, W) -> (1, C, H, W).
        image = image.float().unsqueeze(0)
        label = classIdList[i]
        output = net(image)
        # Index of the highest score along dim 1 = predicted class id.
        _, predicted = torch.max(output, 1)
        # .item() replaces the deprecated .numpy()[0] / Variable dance.
        predicted = int(predicted.item())
        print('predicted:', predicted)
        print('label:', label)
        correct += int(predicted == label)
        total += 1
print(correct, total)
# float() guards against integer division if this ever runs under Python 2.
print('Accuracy on validation is %.3f' % (float(correct) / total))
But the model always predicts the same label (19) for different images:
Any help will be appreciated!