Hi Ptrblck,
I add a second loss to the first loss and expect the gradients, weights, and results to change, but there is no difference compared to when I use only one loss function. The first loss is BCELoss and the second one is an L1 term.
I checked the gradients in both cases, with loss1 alone and with loss1 + loss2, and they were exactly the same. Adding the second loss has no effect on the gradients, even if I use loss1 + 10*loss2. My training snippet is below, followed by a minimal sketch of the check I run and then the generator definition.
netG = Generator994(ngpu, nz, ngf).to(device)
optimizerG = optim.Adam(netG.parameters(), lr=lr2, betas=(beta1, 0.999))

# ... inside the training loop (fake, Gaussy, netD, criterion, label, real_label
# are defined earlier in the full script):
netG.zero_grad()
label.fill_(real_label)
label = label.to(device)
output = netD(fake).view(-1)
# Calculate G's loss based on this output
loss1 = criterion(output, label)
# Histogram-based L1 term between the target (Gaussy) and generated (fake) distributions
xxx = torch.histc(Gaussy.squeeze(1).view(-1).cpu(), 100, min=0, max=1, out=None)
ddGaussy = xxx / xxx.sum()
xxx1 = torch.histc(fake.squeeze(1).view(-1).cpu(), 100, min=0, max=1, out=None)
ddFake = xxx1 / xxx1.sum()
loss2 = abs(ddGaussy - ddFake).sum()
# Calculate gradients for G from both losses
errG = loss1 + loss2
errG.backward()
for param in netG.parameters():
    print(param.grad.data.sum())
# Update G
optimizerG.step()
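
To make the comparison reproducible, here is a minimal, self-contained sketch of the check I run. The ToyNet model, the random data, and the plain L1 term below are placeholders standing in for my real Generator994 / netD / histogram loss, so the names and shapes are just assumptions:

import torch
import torch.nn as nn
import torch.nn.functional as F

torch.manual_seed(0)

class ToyNet(nn.Module):
    def __init__(self):
        super().__init__()
        self.fc = nn.Linear(10, 1)
    def forward(self, x):
        return torch.sigmoid(self.fc(x))

net = ToyNet()
criterion = nn.BCELoss()
x = torch.randn(8, 10)
label = torch.ones(8, 1)
target = torch.zeros(8, 1)

def grad_sums(loss):
    # zero the gradients, backpropagate the given loss,
    # and record the per-parameter gradient sums
    net.zero_grad()
    loss.backward()
    return [p.grad.sum().item() for p in net.parameters()]

# case 1: loss1 only
sums1 = grad_sums(criterion(net(x), label))

# case 2: loss1 + loss2 (here a simple L1 term on the same output)
out = net(x)
sums2 = grad_sums(criterion(out, label) + F.l1_loss(out, target))

print(sums1)
print(sums2)  # with my real loss2 these two lists come out identical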
## ------------------
class Generator994(nn.Module):
    def __init__(self, ngpu, nz, ngf):
        super(Generator994, self).__init__()
        self.ngpu = ngpu
        self.nz = nz
        self.ngf = ngf
        self.l1 = nn.Sequential(
            # input is Z (nz x 1 x 1), going into a convolution
            nn.ConvTranspose2d(self.nz, self.ngf * 8, 3, 1, 0, bias=False),
            nn.BatchNorm2d(self.ngf * 8),
            nn.ReLU(True),)
        # state size: (ngf*8) x 3 x 3 for a 1x1 latent input
        self.l2 = nn.Sequential(
            nn.ConvTranspose2d(self.ngf * 8, self.ngf * 4, 3, 1, 0, bias=False),
            nn.BatchNorm2d(self.ngf * 4),
            nn.ReLU(True),)
        # state size: (ngf*4) x 5 x 5
        self.l3 = nn.Sequential(
            nn.ConvTranspose2d(self.ngf * 4, self.ngf * 2, 3, 1, 0, bias=False),
            nn.BatchNorm2d(self.ngf * 2),
            nn.ReLU(True),)
        # state size: (ngf*2) x 7 x 7
        self.l4 = nn.Sequential(
            nn.ConvTranspose2d(self.ngf * 2, 1, 3, 1, 0, bias=False),
            nn.Sigmoid(),
            # nn.Tanh()
        )
        # state size: 1 x 9 x 9

    def forward(self, input):
        out = self.l1(input)
        out = self.l2(out)
        out = self.l3(out)
        out = self.l4(out)
        print(out.shape)  # debug: check the output size
        return out
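
For completeness, this is roughly how I build and call the generator. The concrete numbers for ngpu, nz, ngf, and the batch size below are just example values, not necessarily the ones I train with:

import torch

device = torch.device('cuda:0' if torch.cuda.is_available() else 'cpu')
ngpu, nz, ngf = 1, 100, 64
netG = Generator994(ngpu, nz, ngf).to(device)

# DCGAN-style latent input: one nz-dimensional vector per sample, as a 1x1 map
noise = torch.randn(16, nz, 1, 1, device=device)
fake = netG(noise)
# each ConvTranspose2d (kernel 3, stride 1, padding 0) grows the spatial size by 2,
# so a 1x1 latent input gives a 16 x 1 x 9 x 9 output here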