I am trying to use the Adam optimiser. After building the model and defining the optimiser as follows, I get the error `ValueError: optimizer got an empty parameter list` and I don't know how to deal with it. Any comments would be appreciated.
```python
import torch.nn as nn
import torch.optim as optim

class NeuralNet(nn.Module):
    def __int__(self):
        super(NeuralNet, self).__init__()
        self.conv = nn.Conv2d(1, 28, kernel_size=(3, 3))  # 1 input channel, 28 filters
        self.pool = nn.MaxPool2d(2, 2)
        self.hidden = nn.Linear(28 * 13 * 13, 128)
        self.drop = nn.Dropout(0.2)
        self.out = nn.Linear(128, 10)  # fully connected output layer
        self.act = nn.ReLU()

    def forward(self, x):
        x = self.act(self.conv(x))
        print(x.size())  # debug: check the shape before pooling
        x = self.pool(x)
        x = x.view(x.size(0), -1)  # flatten for the linear layers
        x = self.act(self.hidden(x))
        x = self.drop(x)
        x = self.out(x)
        return x

net = NeuralNet()
criterion = nn.CrossEntropyLoss()
optimizer = optim.Adam(net.parameters(), lr=0.001)  # the ValueError is raised here
```
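
Note on what is probably going wrong: the constructor above is named `__int__` rather than `__init__`, so Python never calls it; the inherited `nn.Module.__init__` runs instead, none of the layers are ever created, and `net.parameters()` yields nothing, which is exactly what Adam complains about. Below is a minimal sketch of the difference, using hypothetical `Broken`/`Fixed` classes and a toy `nn.Linear(4, 2)` layer:

```python
import torch.nn as nn
import torch.optim as optim

class Broken(nn.Module):
    def __int__(self):  # typo: Python never invokes this method
        super().__init__()
        self.fc = nn.Linear(4, 2)

class Fixed(nn.Module):
    def __init__(self):  # real constructor: the layer gets registered
        super().__init__()
        self.fc = nn.Linear(4, 2)

print(len(list(Broken().parameters())))  # 0 -> nothing to optimize
print(len(list(Fixed().parameters())))   # 2 (fc.weight and fc.bias)

try:
    optim.Adam(Broken().parameters(), lr=0.001)
except ValueError as e:
    print(e)  # optimizer got an empty parameter list

optimizer = optim.Adam(Fixed().parameters(), lr=0.001)  # works
```

If that is the cause, renaming `__int__` to `__init__` in `NeuralNet` should make the empty-parameter-list error go away.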