Training is very slow during loss computation

Here's my training loop code:

```python
import cv2
import numpy as np
import torch

for epoch in range(epochs):
    for count, data in enumerate(data_list):
        
        train_image = []
        sample_image = []
        
        for image in data:
            # Read the color real image, scale to [0, 1], and make it channels-first: (H, W, C) -> (C, H, W)
            img_train = cv2.imread(path + image).transpose(2, 0, 1) / 255
            img_train = img_train.reshape(1, *img_train.shape)   # add a batch dimension

            # Read the grayscale generator input: (H, W) -> (1, 1, H, W)
            img_sample = cv2.imread(path2 + image, 0) / 255
            img_sample = img_sample.reshape(1, 1, *img_sample.shape)

            train_image.append(img_train)
            sample_image.append(img_sample)

            assert img_sample.shape == (1, 1, 144, 144)
            
        # Stack into batched tensors and move them to the GPU
        train_image = torch.from_numpy(np.concatenate(train_image, axis=0)).float().to(device)
        sample_image = torch.from_numpy(np.concatenate(sample_image, axis=0)).float().to(device)
        
        label = torch.full((train_image.shape[0],), real_label, dtype=torch.float, device=device)
        
        #Training the discriminator... minimizing -(log(D(x)) + log(1 - D(G(z))))

        dis.zero_grad()

        G_z = Gen(sample_image)   # these input tensors carry no grad, so .detach() on them is a no-op

        disc_real_out = dis(train_image).view(-1)
        
        error_real = GAN_loss(disc_real_out, label)
        error_real.backward()
        
        disc_fake_out = dis(G_z.detach()).view(-1)   # .detach() so the discriminator update doesn't backprop into Gen
        label.fill_(fake_label)
        error_fake = GAN_loss(disc_fake_out, label)
        error_fake.backward()
        
        total_disc_error = error_real + error_fake
        D_optimizer.step()
        
        #Training the Generator... maximizing log(D(G(z)))

        Gen.zero_grad()

        D_G_z = dis(G_z).view(-1)   # no .detach() here, otherwise no gradient ever reaches Gen
        label.fill_(real_label)
        error_gen = GAN_loss(D_G_z, label)
        error_gen.backward()
        
        G_optimizer.step()
        
        G_losses.append(error_gen.item())
        D_losses.append(total_disc_error.item())
    
        print("Discriminator Loss : ", total_disc_error.item(), "\t", "Generator Loss : ", error_gen.item())
```
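
I suspect the slowness isn't the loss itself but the data pipeline: every iteration re-reads and decodes each image from disk with cv2.imread, in the main process, every epoch. Here's a rough sketch of how I think the loading could move into a torch.utils.data.Dataset so a DataLoader decodes images in background workers (PairDataset and the batch_size/num_workers values are just placeholders I made up). Is this the right direction?

```python
import cv2
import torch
from torch.utils.data import Dataset, DataLoader

class PairDataset(Dataset):
    """Yields (color real image, grayscale generator input) pairs by filename."""

    def __init__(self, filenames, color_dir, gray_dir):
        self.filenames = filenames
        self.color_dir = color_dir
        self.gray_dir = gray_dir

    def __len__(self):
        return len(self.filenames)

    def __getitem__(self, idx):
        name = self.filenames[idx]
        # Same preprocessing as in my loop above, but now done inside a worker process
        color = cv2.imread(self.color_dir + name).transpose(2, 0, 1) / 255   # (C, H, W)
        gray = cv2.imread(self.gray_dir + name, 0)[None, ...] / 255          # (1, H, W)
        return torch.from_numpy(color).float(), torch.from_numpy(gray).float()

# filenames = [f for batch in data_list for f in batch]   # flatten my existing batching
# loader = DataLoader(PairDataset(filenames, path, path2),
#                     batch_size=16, shuffle=True,
#                     num_workers=4, pin_memory=True)
#
# for epoch in range(epochs):
#     for train_image, sample_image in loader:
#         train_image = train_image.to(device, non_blocking=True)
#         sample_image = sample_image.to(device, non_blocking=True)
#         ...   # same discriminator/generator steps as above
```

My understanding is that with num_workers > 0 the image decoding overlaps with the GPU work, and pin_memory=True plus non_blocking=True should speed up the host-to-device copies.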