The discriminator in my GAN classifies real and fake samples with probability 0.5 (BCE loss -log(0.5) ≈ 0.69). What can I do to improve the discriminator?

[1/600][182]  Loss_D: 1.42 Loss_G: 0.74 Time: 319.68s
[2/600][182]  Loss_D: 1.40 Loss_G: 0.70 Time: 317.61s
[3/600][182]  Loss_D: 1.40 Loss_G: 0.69 Time: 318.28s
[4/600][182]  Loss_D: 1.39 Loss_G: 0.69 Time: 319.15s
[5/600][182]  Loss_D: 1.39 Loss_G: 0.69 Time: 319.27s
[6/600][182]  Loss_D: 1.39 Loss_G: 0.69 Time: 319.53s
[7/600][182]  Loss_D: 1.39 Loss_G: 0.69 Time: 319.34s
[8/600][182]  Loss_D: 1.39 Loss_G: 0.69 Time: 318.61s
[9/600][182]  Loss_D: 1.38 Loss_G: 0.70 Time: 319.03s
[10/600][182] Loss_D: 1.38 Loss_G: 0.69 Time: 319.61s
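For context: Loss_D plateaus at ≈ 1.39 ≈ 2 × (-log 0.5) and Loss_G at ≈ 0.69 ≈ -log 0.5, which is exactly what you get if the discriminator outputs 0.5 for every input and Loss_D sums the real and fake BCE terms (an assumption about the training loop, which is not shown here). A quick check:

import torch
import torch.nn.functional as F

# D outputting 0.5 for every sample reproduces the plateau values above.
p = torch.full((4,), 0.5)
real = F.binary_cross_entropy(p, torch.ones(4))   # -log(0.5) ≈ 0.6931
fake = F.binary_cross_entropy(p, torch.zeros(4))  # -log(0.5) ≈ 0.6931
print(real.item(), (real + fake).item())          # ≈ 0.6931, ≈ 1.3863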

I am mapping a 1024-dimensional vector to a 100-dimensional vector.
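The code below uses the project's cfg and Swish. To make the snippets self-contained, here are the imports plus minimal stand-ins; the cfg values and the Swish definition (x · sigmoid(x)) are illustrative assumptions, not the project's actual config:

import torch
import torch.nn as nn
from types import SimpleNamespace

# Hypothetical config values for illustration only; the real ones come from cfg.
cfg = SimpleNamespace(
    GAN=SimpleNamespace(Z_DIM=100),
    TEXT=SimpleNamespace(DIMENSION=1024, EMBEDDING_DIM=100))

class Swish(nn.Module):
    # Common Swish form, x * sigmoid(x); the project's version may differ.
    def forward(self, x):
        return x * torch.sigmoid(x)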

class MAP_NET_IT(nn.Module):
    # Generator: maps an image embedding (plus noise) to a text embedding.
    def __init__(self):
        super(MAP_NET_IT, self).__init__()
        # self.df_dim = cfg.TEXT.DIMENSION
        self.ef_dim = cfg.TEXT.EMBEDDING_DIM
        self.in_dim = cfg.GAN.Z_DIM + cfg.TEXT.DIMENSION
        self.define_module()

    def define_module(self):
        ndf = self.in_dim
        efg = self.ef_dim

        self.fc1 = nn.Sequential(
            nn.Linear(ndf, ndf // 2),
            # nn.BatchNorm1d(ndf // 2),
            # Swish(),
            nn.LeakyReLU(0.2, inplace=True),
            nn.Linear(ndf // 2, ndf // 4),
            # nn.BatchNorm1d(ndf // 4),
            # Swish(),
            # nn.Linear(ndf // 4, ndf // 8),
            # nn.BatchNorm1d(ndf // 8),
            # Swish())
            nn.LeakyReLU(0.2, inplace=True))
        self.fc2 = nn.Sequential(
            nn.Linear(ndf // 4, efg),
            # nn.Linear(ndf // 8, efg),
            nn.Tanh())

    def forward(self, z_code, c_code):
        # Concatenate conditioning code and noise, then project down to efg dims.
        in_code = torch.cat((c_code, z_code), 1)
        x_code = self.fc1(in_code)
        output = self.fc2(x_code)
        return output
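A quick shape check with the illustrative cfg above (batch of 8; z is noise, c the conditioning embedding):

net_g = MAP_NET_IT()
z = torch.randn(8, cfg.GAN.Z_DIM)
c = torch.randn(8, cfg.TEXT.DIMENSION)
print(net_g(z, c).shape)  # torch.Size([8, 100]); values in (-1, 1) from Tanh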

class D_NET_TEXT(nn.Module):
    # Discriminator for the text embedding.
    def __init__(self):
        super(D_NET_TEXT, self).__init__()
        self.df_dim = cfg.TEXT.EMBEDDING_DIM
        self.define_module()

    def define_module(self):
        ndf = self.df_dim

        self.encodings = nn.Sequential(
            nn.Linear(ndf, ndf // 2),
            nn.BatchNorm1d(ndf // 2),
            Swish(),
            nn.Linear(ndf // 2, ndf // 4),
            nn.BatchNorm1d(ndf // 4),
            Swish(),
            nn.Linear(ndf // 4, ndf // 8),
            nn.BatchNorm1d(ndf // 8),
            Swish())
        self.logits = nn.Sequential(
            nn.Linear(ndf // 8, 1),
            nn.Sigmoid())

    def forward(self, x_var):
        x_code = self.encodings(x_var)
        output = self.logits(x_code)
        return [output.view(-1)]
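And the matching smoke test for the discriminator (note BatchNorm1d needs batch size > 1 in train mode):

net_d = D_NET_TEXT()
emb = torch.randn(8, cfg.TEXT.EMBEDDING_DIM)
out = net_d(emb)[0]  # forward returns a one-element list
print(out.shape)     # torch.Size([8]); sigmoid probabilities in (0, 1)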