Hi,
I have got a strange problem when I use DCGAN in pytorch. Could anyone help me?
After I create object of both generator and discriminator, I try to print models, but models are empty. And the error raised when I initialize adam optimizer, it said "optimizer got an empty parameter list."
The code is:
### network
netG = DCGAN_Generator()
netD = DCGAN_Discriminator()
netG.weight_init(mean=0.0, std=0.02)
netD.weight_init(mean=0.0, std=0.02)
print(netG)
print(netD)
netG_optimizer = optim.Adam(netG.parameters(), lr=opt.lr, betas=(0.5, 0.999))
netD_optimizer = optim.Adam(netD.parameters(), lr=opt.lr, betas=(0.5, 0.999))
###Generator
class DCGAN_Generator(nn.Module):
def __init__(self):
super(DCGAN_Generator, self).__init__()
# input is Z, going into a convolution
self.deconv1 = nn.ConvTranspose2d(100, 1024, (8,4), 1, 0, bias=False),
self.bn1 = nn.BatchNorm2d(1024),
self.deconv2 = nn.ConvTranspose2d(1024, 512, 4, 2, 1, bias=False),
self.bn2 = nn.BatchNorm2d(512),
self.deconv3 = nn.ConvTranspose2d(512, 256, 4, 2, 1, bias=False),
self.bn3 = nn.BatchNorm2d(256),
self.deconv4 = nn.ConvTranspose2d(256, 128, 4, 2, 1, bias=False),
self.bn4 = nn.BatchNorm2d(128),
self.deconv5 = nn.ConvTranspose2d(128, 3, 4, 2, 1, bias=False),
def weight_init(self, mean, std):
for m in self._modules:
normal_init(self._modules[m], mean, std)
def forward(self, x):
x = F.relu1(self.bn1(self.deconv1))
x = F.relu2(self.bn2(self.deconv2(x)))
x = F.relu3(self.bn3(self.deconv3(x)))
x = F.relu4(self.bn4(self.deconv4(x)))
x = F.tanh5(self.deconv5(x))
return x
### discriminator
class DCGAN_Discriminator(nn.Module):
def __init__(self):
super(DCGAN_Discriminator, self).__init__()
self.conv1 = nn.Conv2d(3, 128, 4, 2, 1, bias=False),
self.bn1 = nn.BatchNorm2d(128),
self.conv2 = nn.Conv2d(128, 256, 4, 2, 1, bias=False),
self.bn2 = nn.BatchNorm2d(256),
self.conv3 = nn.Conv2d(256, 512, 4, 2, 1, bias=False),
self.bn3 = nn.BatchNorm2d(512),
self.conv4 = nn.Conv2d(512, 1024, 4, 2, 1, bias=False),
self.bn4 = nn.BatchNorm2d(1024),
self.conv5 = nn.Conv2d(1024, 1, (8,4), 1, 0, bias=False),
def weight_init(self, mean, std):
for m in self._modules:
normal_init(self._modules[m], mean, std)
def forward(self, x):
x = F.leaky_relu1(self.bn1(self.conv1))
x = F.leaky_relu2(self.bn2(self.conv2(x)))
x = F.leaky_relu3(self.bn3(self.conv3(x)))
x = F.leaky_relu4(self.bn4(self.conv4(x)))
x = F.sigmoid(self.conv5(x))
return x