Hi,
I have the following code:
import torch
import torch.nn as nn
import torch.nn.functional as F

class LeNet5(nn.Module):
    def __init__(self, num_classes, grayscale=False):
        super(LeNet5, self).__init__()
        self.grayscale = grayscale
        self.num_classes = num_classes
        if self.grayscale:
            in_channels = 1
        else:
            in_channels = 3
        # feature layers, stored in a plain Python dict
        self.layer = {}
        self.layer[1] = nn.Sequential(
            nn.Conv2d(in_channels, 6, kernel_size=5),
            nn.Tanh()
        )
        self.layer[2] = nn.Sequential(
            nn.MaxPool2d(kernel_size=2)
        )
        self.layer[3] = nn.Sequential(
            nn.Conv2d(6, 16, kernel_size=5),
            nn.Tanh()
        )
        self.layer[4] = nn.Sequential(
            nn.MaxPool2d(kernel_size=2)
        )
        # classifier, linear layers
        self.layer[5] = nn.Sequential(
            nn.Linear(16 * 5 * 5, 120),
            nn.Tanh()
        )
        self.layer[6] = nn.Sequential(
            nn.Linear(120, 84),
            nn.Tanh()
        )
        self.layer[7] = nn.Sequential(
            nn.Linear(84, num_classes)
        )

    def forward(self, x):
        forward_layer = {}
        forward_layer[0] = x
        forward_layer[1] = self.layer[1](forward_layer[0])
        forward_layer[2] = self.layer[2](forward_layer[1])
        forward_layer[3] = self.layer[3](forward_layer[2])
        forward_layer[4] = self.layer[4](forward_layer[3])
        forward_layer[4] = torch.flatten(forward_layer[4], 1)  # flatten before the linear layers
        forward_layer[5] = self.layer[5](forward_layer[4])
        forward_layer[6] = self.layer[6](forward_layer[5])
        forward_layer[7] = self.layer[7](forward_layer[6])
        logits = forward_layer[7]
        probas = F.softmax(logits, dim=1)
        return logits, probas, x, forward_layer
When I pass the model's parameters to an optimizer, it raises the error:
ValueError: optimizer got an empty parameter list
And I verified that the model's parameter list is indeed empty.
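This is roughly how I checked it (minimal sketch; num_classes=10, grayscale=True, and the SGD settings are just placeholders):

    model = LeNet5(num_classes=10, grayscale=True)
    print(list(model.parameters()))  # prints [], nothing was registered
    optimizer = torch.optim.SGD(model.parameters(), lr=0.01)  # raises the ValueError above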
If I change the names of the layers inside the class, it works; see the sketch below.
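By "changing the names" I mean giving each block its own attribute, roughly like this (abbreviated sketch, forward unchanged except for the attribute names):

    class LeNet5(nn.Module):
        def __init__(self, num_classes, grayscale=False):
            super().__init__()
            in_channels = 1 if grayscale else 3
            # assigning each nn.Sequential to its own attribute registers its parameters
            self.layer1 = nn.Sequential(nn.Conv2d(in_channels, 6, kernel_size=5), nn.Tanh())
            self.layer2 = nn.Sequential(nn.MaxPool2d(kernel_size=2))
            self.layer3 = nn.Sequential(nn.Conv2d(6, 16, kernel_size=5), nn.Tanh())
            self.layer4 = nn.Sequential(nn.MaxPool2d(kernel_size=2))
            self.layer5 = nn.Sequential(nn.Linear(16 * 5 * 5, 120), nn.Tanh())
            self.layer6 = nn.Sequential(nn.Linear(120, 84), nn.Tanh())
            self.layer7 = nn.Sequential(nn.Linear(84, num_classes))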
Is it possible to define the layers as a dict inside the class?