When to call backward()

Hi,

```python
while steps < max_steps:
    print('Step {}/{}'.format(steps, max_steps))
    print('-' * 10)

    # Each step runs a training phase followed by a validation phase
    for phase in ['train', 'val']:
        if phase == 'train':
            model.train(True)   # set model to training mode
        else:
            model.train(False)  # set model to evaluate mode

        tot_loss = 0.0
        tot_loc_loss = 0.0
        tot_cls_loss = 0.0
        tot_acc = 0.0
        num_iter = 0

        for data in dataloaders[phase]:
            num_iter += 1

            # get the inputs and wrap them in Variable
            inputs, labels = data
            inputs = Variable(inputs.cuda())
            labels = Variable(labels.cuda())

            per_frame_logits = model(inputs)

            criterion = nn.CrossEntropyLoss().cuda()
            # convert the label tensor to class indices via argmax along dim 1
            cls_loss = criterion(per_frame_logits, torch.max(labels, dim=1)[1].long())
            tot_cls_loss += cls_loss.data

            optimizer.zero_grad()
            loss = cls_loss
            tot_loss += loss.data
            loss.backward()
            optimizer.step()

            acc = calculate_accuracy(per_frame_logits, torch.max(labels, dim=1)[1])
            tot_acc += acc
```
I have a question about the code above: in the validation phase, is it still doing backpropagation because I am calling loss.backward(), or is it okay because I call model.train(False)?
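
For reference, this is the kind of gating I am wondering whether I need. It is just a minimal, self-contained sketch: the linear model, optimizer, batch shapes, and the two-iteration inner loop are placeholders standing in for my real model and dataloaders, not my actual setup.

```python
import torch
import torch.nn as nn

device = 'cuda' if torch.cuda.is_available() else 'cpu'

# Toy stand-ins for the real model/optimizer/dataloaders (placeholders only)
model = nn.Linear(16, 4).to(device)
optimizer = torch.optim.SGD(model.parameters(), lr=0.01)
criterion = nn.CrossEntropyLoss()

for phase in ['train', 'val']:
    model.train(phase == 'train')  # toggles dropout/batch-norm behaviour only

    for _ in range(2):  # stand-in for `for data in dataloaders[phase]:`
        inputs = torch.randn(8, 16, device=device)
        labels = torch.randint(0, 4, (8,), device=device)

        # build the graph (and track gradients) only in the training phase
        with torch.set_grad_enabled(phase == 'train'):
            logits = model(inputs)
            loss = criterion(logits, labels)

        if phase == 'train':
            optimizer.zero_grad()
            loss.backward()   # backward()/step() happen only while training
            optimizer.step()
```

My understanding is that model.train(False) only changes the behaviour of layers like dropout and batch norm and does not stop gradients from being computed, but I would like someone to confirm that.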

I would be glad if someone could clear this up for me.
Thanks!