from torch import nn, optim
def fit(train_dl, model, test_dl=None, epoch=1000, lr=0.001):
    """Train *model* on *train_dl* with SGD + cross-entropy loss.

    Args:
        train_dl: training DataLoader yielding (inputs, labels) batches.
        model: the nn.Module to optimize (updated in place).
        test_dl: optional validation DataLoader; if given, a validation
            loss is computed and printed after every epoch.
        epoch: number of training epochs.
        lr: learning rate for the SGD optimizer.

    Prints the mean per-sample train (and optional validation) loss each
    epoch. The mean is correct: running_loss accumulates
    loss.item() * batch_size, and len(dl.sampler) is the total number of
    samples, so the quotient is the per-sample average.
    """
    criterion = nn.CrossEntropyLoss()
    # Bug fix: use the `lr` argument instead of a hard-coded 0.001.
    optimizer = optim.SGD(model.parameters(), lr=lr, momentum=0.9)
    for ep in range(epoch):  # renamed so the loop var no longer shadows the param
        running_loss = 0.0
        model.train()
        for inputs, labels in train_dl:
            optimizer.zero_grad()
            outputs = model(inputs)
            loss = criterion(outputs, labels)
            loss.backward()
            optimizer.step()
            # Weight by batch size so the final division yields a per-sample mean.
            running_loss += loss.item() * inputs.size(0)
        print("Train Loss: ", running_loss / len(train_dl.sampler))

        if test_dl is not None:
            model.eval()
            running_loss = 0.0
            # no_grad: skip autograd bookkeeping during evaluation.
            with torch.no_grad():
                for inputs, labels in test_dl:
                    outputs = model(inputs)
                    loss = criterion(outputs, labels)
                    running_loss += loss.item() * inputs.size(0)
            print("Validation Loss: ", running_loss / len(test_dl.sampler))
    print('Finished Training')
# Q: Is the equation for the per-epoch loss correct?
# A: Yes. running_loss accumulates loss.item() * inputs.size(0) (the batch-mean
# loss re-weighted by batch size), and len(dl.sampler) is the total number of
# samples, so running_loss / len(dl.sampler) is the correct mean per-sample
# loss for the epoch, even when the last batch is smaller than the rest.