How to plot a graph of training and validation accuracy

My question is about how to display a graph of the training and validation accuracy. Below is the code I use. I have tried many ways but still cannot get a graph to show up. Can anyone help me with this? :frowning:

Train the model

import numpy as np
import torch

def train(n_epochs, loaders, model, optimizer, criterion, use_cuda, save_path):
    '''returns trained model'''

    # Initialize tracker for minimum validation loss
    valid_loss_min = np.inf

    for epoch in range(1, n_epochs + 1):
        # Inside the loop I keep track of the training and validation loss
        train_loss = 0.0
        valid_loss = 0.0

        # Model training
        model.train()
        for batch_idx, (data, target) in enumerate(loaders['train']):
            # 1st step: move to GPU
            if use_cuda:
                data, target = data.cuda(), target.cuda()

            # Then, clear (zero out) the gradients of all optimized variables
            optimizer.zero_grad()
            # Forward pass: compute predicted outputs by passing inputs to the model
            output = model(data)
            # Apply the cross-entropy criterion to calculate the batch loss
            loss = criterion(output, target)
            # Backward pass: compute gradient of the loss with respect to model parameters
            loss.backward()
            # Perform optimization step (parameter update)
            optimizer.step()
            # Update the running average of the training loss
            train_loss = train_loss + ((1 / (batch_idx + 1)) * (loss.item() - train_loss))

        # Model validation
        model.eval()
        for batch_idx, (data, target) in enumerate(loaders['valid']):
            # Move to GPU
            if use_cuda:
                data, target = data.cuda(), target.cuda()
            # Forward pass: compute predicted outputs by passing inputs to the model
            output = model(data)
            # Calculate the batch loss
            loss = criterion(output, target)
            # Update the running average of the validation loss
            valid_loss = valid_loss + ((1 / (batch_idx + 1)) * (loss.item() - valid_loss))

        # Print training/validation statistics
        print('Epoch: {} \tTraining Loss: {:.5f} \tValidation Loss: {:.5f}'.format(
            epoch,
            train_loss,
            valid_loss))

        # Save the model if the validation loss has decreased
        if valid_loss <= valid_loss_min:
            print('Validation loss decreased ({:.5f} --> {:.5f}). Saving model ...'.format(
                valid_loss_min,
                valid_loss))
            torch.save(model.state_dict(), save_path)
            valid_loss_min = valid_loss

    # Return trained model
    return model
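
From what I understand, before I can plot anything I first need an accuracy value for every epoch, which my code above does not compute. This is a rough helper I sketched myself (not tested, and compute_accuracy is just a name I made up, not part of the assignment code). I think it could be called on loaders['train'] and loaders['valid'] at the end of each epoch, with the results appended to two lists:

import torch

def compute_accuracy(model, loader, use_cuda):
    # Fraction of correctly classified samples over the whole loader
    model.eval()
    correct, total = 0, 0
    with torch.no_grad():
        for data, target in loader:
            if use_cuda:
                data, target = data.cuda(), target.cuda()
            pred = model(data).argmax(dim=1)  # predicted class per sample
            correct += (pred == target).sum().item()
            total += target.size(0)
    return correct / total

# Inside train(), at the end of each epoch, something like:
# train_acc_history.append(compute_accuracy(model, loaders['train'], use_cuda))
# valid_acc_history.append(compute_accuracy(model, loaders['valid'], use_cuda))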

Define the loaders for transfer learning

loaders_transfer = {'train': trainloader,
                    'valid': validloader,
                    'test': testloader}

Training the model

model_transfer = train(50, loaders_transfer, model_transfer, optimizer_transfer, criterion_transfer, use_cuda, 'model_transfer.pt')
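
And this is the plotting part I imagined would pop up the graph after training, assuming the per-epoch accuracies were collected into the two lists mentioned above (matplotlib and the list names are my own assumption, not from the assignment):

import matplotlib.pyplot as plt

def plot_accuracy(train_acc_history, valid_acc_history):
    # One accuracy value per epoch is assumed in each list
    epochs = range(1, len(train_acc_history) + 1)
    plt.plot(epochs, train_acc_history, label='Training accuracy')
    plt.plot(epochs, valid_acc_history, label='Validation accuracy')
    plt.xlabel('Epoch')
    plt.ylabel('Accuracy')
    plt.title('Training and validation accuracy')
    plt.legend()
    plt.show()  # pops up the figure window (or shows inline in a notebook)

# After training:
# plot_accuracy(train_acc_history, valid_acc_history)

Is this roughly the right approach, or is there a better way to do it?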