Hello,
I am using VGG to classify the cells of a chessboard, but the classification turns out to be non-deterministic: the same cell can get different labels on successive calls. Is it caused by how I am using it?
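To show what I mean, here is a minimal sketch of the symptom (the image path and the to_tensor helper here are placeholders, not my real files): classifying the same tensor twice with get_piece below does not always return the same label.

from PIL import Image
from torchvision import transforms

# Minimal sketch of the symptom (path and to_tensor are placeholders):
# the exact same input, classified twice, does not always give the same label.
to_tensor = transforms.Compose([transforms.Resize((224, 224)), transforms.ToTensor()])
cell = to_tensor(Image.open("some_cell.png").convert("RGB"))

label1 = get_piece(cell, False, "./pieces_detection/")
label2 = get_piece(cell, False, "./pieces_detection/")
print(label1, label2)  # these sometimes differ, which is the problem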
The function splitting the chessboard into cells:

from PIL import Image

image = Image.open(path_to_image).convert("RGB")
width, height = image.size  # PIL .size is (width, height)
image = totensor(image.resize((int(round(width / 100)) * 224, int(round(height / 100)) * 224), Image.BILINEAR))
# print(image.shape)
for i in range(224, (int(round(height / 100)) * 224) + 224, 224):  # rows: dim 1 of the tensor is height
    for j in range(224, (int(round(width / 100)) * 224) + 224, 224):  # columns: dim 2 is width
        # plt.imshow(image[:3, i - 224:i - 10, j - 224:j - 10].float().permute(1, 2, 0).numpy())
        # plt.show()
        # classify the same cell twice to try to catch the non-determinism
        piece_try1 = get_piece(totensor(resize(topil(image[:3, i - 224:i - 10, j - 224:j - 10]))), False, "./pieces_detection/")
        piece_try2 = get_piece(totensor(resize(topil(image[:3, i - 224:i - 10, j - 224:j - 10]))), False, "./pieces_detection/")
        if piece_try1 == piece_try2:
            # print("first try: " + piece_try1)
            if piece_try1 != "empty_cell":
                return False
        else:
            # the two answers disagree, so ask a third time
            piece_try3 = get_piece(totensor(resize(topil(image[:3, i - 224:i - 10, j - 224:j - 10]))), False, "./pieces_detection/")
            print("second try: " + piece_try3)
            if piece_try3 != "empty_cell":
                return False
return True
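To make the grid arithmetic concrete, here is a worked example for a hypothetical 800x800 board image (the size is made up for illustration):

# Worked example of the cell grid for a hypothetical 800x800 board image
width = height = 800
cells_per_side = int(round(width / 100))             # 8
resized_side = cells_per_side * 224                  # 1792 px after the resize above
row_starts = list(range(224, resized_side + 224, 224))
print(len(row_starts))                               # 8 cells per side
# each crop [i - 224:i - 10] keeps 214 of the 224 px, trimming 10 px at the cell border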
Classifying the piece:

def get_piece(cell, transf, folder):
    # the ImageFolder is only used here to recover the class names in a fixed order
    dsets = datasets.ImageFolder(folder + 'dataset/', transformation)
    dset_loaders = torch.utils.data.DataLoader(dsets, batch_size=12, shuffle=False)
    classes = dsets.classes
    model = torch.load(folder + "model.ckpt")
    if transf:
        input = torch.unsqueeze(transformation(cell), 0)  # apply the transform, then add a batch dimension
    else:
        input = torch.unsqueeze(cell, 0)
    res = model(Variable(input))
    _, preds = torch.max(res.data, 1)  # index of the highest score
    return classes[preds[0]]
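For completeness, totensor, topil, resize and transformation are plain torchvision transforms defined elsewhere in my script. A rough sketch of the kind of definitions in use (the exact sizes and composition here are assumptions, not necessarily what my script has):

from torchvision import transforms

# Rough sketch of the helper transforms (sizes/composition are assumptions)
totensor = transforms.ToTensor()
topil = transforms.ToPILImage()
resize = transforms.Resize((224, 224))
transformation = transforms.Compose([
    transforms.Resize((224, 224)),
    transforms.ToTensor(),
])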
The network training:

def train():
    model = models.vgg16(pretrained=True)
    resize = transforms.Resize((224, 224))
    dsets = datasets.ImageFolder('./dataset/', transformation)
    dset_loaders = torch.utils.data.DataLoader(dsets, batch_size=12, shuffle=False)
    criterion = nn.CrossEntropyLoss()
    optimizer = torch.optim.SGD(model.parameters(), lr=0.001, momentum=0.5)
    for i in range(0, 200):  # epochs
        lost = 0
        for data, labels in dset_loaders:
            optimizer.zero_grad()
            data = Variable(data)
            labels = Variable(labels)
            # forward
            outputs = model(data)
            _, preds = torch.max(outputs.data, 1)
            loss = criterion(outputs, labels)
            loss.backward()
            optimizer.step()
            lost += loss.data[0]
        print("loss = " + str(lost / 24))
    return model
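The model returned by train() is then saved to disk so that get_piece() can torch.load it; that step looks something like this (the destination path is assumed from the folder argument used above):

# Saving the trained model so get_piece() can load it later
# (the destination path is assumed from the folder argument used above)
model = train()
torch.save(model, "./pieces_detection/model.ckpt")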
Any ideas?