@ptrblck Thanks for your response. Yes, I checked the indentation. My code does run normally and executes __init__ all the way to the end; the problem arises when the __setattr__ function is called.
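For reference, here is a minimal toy repro (my own sketch, not code from my project) of the only situation in which torch raises this message, as far as I understand it: every submodule assignment goes through nn.Module.__setattr__, which needs the internal registries (e.g. self._modules) that only Module.__init__() creates:

import torch.nn as nn

class Broken(nn.Module):
    def __init__(self):
        # Missing super().__init__(): nn.Module's _modules dict was never
        # created, so the assignment below fails inside __setattr__.
        self.layer = nn.Linear(4, 4)

try:
    Broken()
except AttributeError as e:
    print(e)  # cannot assign module before Module.__init__() call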
Here is the code of my class:
import torch.nn as nn

class ResNetGenerator(nn.Module):
    def __init__(self, input_nc=3, output_nc=3, n_residual_blocks=9, use_dropout=False):
        print("*" * 50)
        super(ResNetGenerator, self).__init__()
        # super().__init__()
        # nn.Module.__init__(self)

        # Input layer
        model = [nn.ReflectionPad2d(3),
                 nn.Conv2d(input_nc, 64, kernel_size=7, padding=0),
                 nn.InstanceNorm2d(64),
                 nn.ReLU(True)]

        # Encoding layers
        in_features = 64
        out_features = in_features * 2
        for _ in range(2):
            model += [nn.Conv2d(in_features, out_features, kernel_size=3, stride=2, padding=1),
                      nn.InstanceNorm2d(out_features),
                      nn.ReLU(True)]
            in_features = out_features
            out_features = in_features * 2

        # Transformation layers (residual blocks)
        for _ in range(n_residual_blocks):
            model += [ResidualBlock(in_features, use_dropout)]

        # Decoding layers
        out_features = in_features // 2
        for _ in range(2):
            model += [nn.ConvTranspose2d(in_features, out_features, kernel_size=3, stride=2, padding=1, output_padding=1),
                      nn.InstanceNorm2d(out_features),
                      nn.ReLU(True)]
            in_features = out_features
            out_features = in_features // 2

        # Output layer
        model += [nn.ReflectionPad2d(3),
                  nn.Conv2d(64, output_nc, kernel_size=7, padding=0),
                  nn.Tanh()]

        self.model = nn.Sequential(*model)
        print("#" * 50)

    def forward(self, x):
        return self.model(x)
And here is the complete traceback:
File "train.py", line 40, in <module>
model = ColorizationCycleGAN(args)
File "/path/cycle_gan.py", line 27, in __init__
self.G_A2B = ResNetGenerator(input_nc=self.input_nc, output_nc=self.output_nc, n_residual_blocks=9, use_dropout=False)
File "/path/.local/lib/python3.6/site packages/torch/nn/modules/module.py", line 544, in __setattr__
"cannot assign module before Module.__init__() call")
AttributeError: cannot assign module before Module.__init__() call
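One thing I notice re-reading the traceback: the frame directly above torch's __setattr__ is line 27 of cycle_gan.py, i.e. the assignment self.G_A2B = ResNetGenerator(...) inside ColorizationCycleGAN.__init__, not a line inside ResNetGenerator itself. Here is a minimal sketch of the pattern that would produce exactly this traceback (hypothetical, my real ColorizationCycleGAN is larger; ResNetGenerator is the class shown above):

import torch.nn as nn

class ColorizationCycleGAN(nn.Module):
    def __init__(self, args):
        # No super().__init__() before this assignment: torch's
        # __setattr__ raises the AttributeError from my traceback, even
        # though ResNetGenerator.__init__ itself ran to the end (both
        # prints appear before the error).
        self.G_A2B = ResNetGenerator(input_nc=3, output_nc=3,
                                     n_residual_blocks=9, use_dropout=False)

If that is what is happening, the super().__init__() call to check would be the one in ColorizationCycleGAN rather than the one in ResNetGenerator.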