Passing tensorboard summaries when saving and loading checkpoints

I am using TensorBoard for visualizing the results. How can I include the summary state when saving a general checkpoint, so that I can resume visualization from the same epoch?

The code I'm using:

# Checkpoint payload: 'epoch' is the NEXT epoch to run (hence epoch + 1),
# plus the model and optimizer state dicts. NOTE(review): nothing from the
# TensorBoard writer is stored here, which is why visualization state is
# lost on resume — presumably the global step would need to be saved too.
checkpoint = {'epoch': epoch + 1, 'state_dict': model.state_dict(), 'optimizer': optimizer.state_dict()}
def resume_from_checkpoint(model, fpath='xyz.pth', optimizer=None, scheduler=None):
    """Restore training state from a checkpoint file and return its epoch.

    NOTE: the required ``model`` parameter now comes first — the original
    signature placed the defaulted ``fpath`` before it, which is a
    SyntaxError in Python (non-default argument follows default argument).

    Args:
        model: module whose weights are restored from the 'state_dict' entry.
        fpath: path to the checkpoint file (default 'xyz.pth').
        optimizer: optional optimizer; restored only if the checkpoint
            contains an 'optimizer' entry.
        scheduler: optional LR scheduler; restored only if the checkpoint
            contains a 'scheduler' entry.

    Returns:
        The 'epoch' value stored in the checkpoint (the epoch to resume from).
    """
    print('Loading checkpoint from "{}"'.format(fpath))
    checkpoint = load_checkpoint(fpath)
    model.load_state_dict(checkpoint['state_dict'])
    print('Loaded model weights')

    # Optimizer/scheduler state is optional on both sides: restore only when
    # the caller passed the object AND the checkpoint actually stored it.
    if optimizer is not None and 'optimizer' in checkpoint:
        optimizer.load_state_dict(checkpoint['optimizer'])
        print('Loaded optimizer')
    if scheduler is not None and 'scheduler' in checkpoint:
        scheduler.load_state_dict(checkpoint['scheduler'])
        print('Loaded scheduler')

    epoch = checkpoint['epoch']
    print('Last epoch = {}'.format(epoch))

    # 'rank1' is an optional evaluation metric some checkpoints carry.
    if 'rank1' in checkpoint:
        print('Last rank1 = {:.1%}'.format(checkpoint['rank1']))
    return epoch
for epoch in range(num_epochs):
    total_correct = 0
    total_loss = 0
    if epoch % 1 == 0:
        checkpoint = {'epoch': epoch + 1, 'state_dict': model.state_dict(), 'optimizer': optimizer.state_dict()}
        print("saving checkpoint")
        save_checkpoint(checkpoint)
    for i, traindata in enumerate(trainloader):
        images = 
        labels = 
        optimizer.zero_grad()

My apologies in advance if this question doesn't belong here.