ValueError: optimizer got an empty parameter list why does this happen?

In my NeuralNetwork, I can’t get model.parameters()

this is my NN

class NeuralNetwork(nn.Module):
  def __init__(self):
    super(NeuralNetwork, self).__init__()

    #down
    input_ch = 1
    output_ch = 64

    self.down = []
    for i in range(4):
      self.down.append(
        nn.Sequential(
          nn.Conv2d(input_ch, output_ch, 3, padding=1),
          nn.ReLU(inplace=True),
          nn.Conv2d(output_ch, output_ch, 3, padding=1),
          nn.ReLU(inplace=True),
          nn.MaxPool2d(2,2)
        )
      )
      input_ch = output_ch
      output_ch *= 2

    #bottom
    bottom = nn.Sequential(
      nn.Conv2d(input_ch, output_ch, 3, padding=1),
      nn.ReLU(inplace=True),
      nn.Conv2d(output_ch, output_ch, 3, padding=1),
      nn.ReLU(inplace=True),
      nn.ConvTranspose2d(output_ch, input_ch, kernel_size=2, stride=1)
    )

    #up
    input_ch = output_ch
    output_ch = input_ch // 2

    self.up = []
    for i in range(3):
      self.up.append(
        nn.Sequential(
          nn.Conv2d(input_ch, output_ch, 3, padding=1),
          nn.ReLU(inplace=True),
          nn.Conv2d(output_ch, output_ch, 3, padding=1),
          nn.ReLU(inplace=True),
          nn.ConvTranspose2d(output_ch, output_ch // 2, kernel_size=2, stride=2)
        )
      )
      input_ch = output_ch
      output_ch = input_ch // 2

    #end
    end = nn.Sequential(
        nn.Conv2d(input_ch, output_ch, 3, padding=1),
        nn.ReLU(inplace=True),
        nn.Conv2d(output_ch, output_ch, 3, padding=1),
        nn.ReLU(inplace=True),
        nn.Conv2d(output_ch, 1, 3, padding=1)
    )

  def forward(self, x):
    enc_out = []
    #encoder
    for i in range(4):
      if i == 0:
        enc_out[i] = self.down[i](x)
      else:
        enc_out[i] = self.down[i](enc_out[i-1])
    
    #bottom
    dec_out = self.bottom(enc_out[3])

    #decoder
    for i in range(3):
        dec_out = self.up[i](torch.cat((enc_out[3 - i], dec_out), dim=1))

    #end
    dec_out = self.end(torch.cat((enc_out[0], dec_out), dim=1))

    return dec_out

and the optimizer is here

class DenoiseImage():
  def __init__(self, model, learning_late=1e-3, batch_size=64):
    self.model = model
    self.loss_fn = nn.MSELoss()
    self.optimizer = torch.optim.AdamW(self.model.parameters(), lr=learning_late)

  def training(dataloader, epochs):
    for i in range(epochs):
      for batch, (x, y) in enumerate(dataloader):
        predict = self.model(x)
        loss = self.loss_fn(predict, y)

        self.optimizer.zero_grad()
        loss.backward()
        self.optimizer.step()

error message

ValueError                                Traceback (most recent call last)
<ipython-input-23-293c39f65724> in <module>()
    221 model = NeuralNetwork()
--> 222 dino = DenoiseImage(model, batch_size=64)
    223 

2 frames
/usr/local/lib/python3.7/dist-packages/torch/optim/optimizer.py in __init__(self, params, defaults)
     47         param_groups = list(params)
     48         if len(param_groups) == 0:
---> 49             raise ValueError("optimizer got an empty parameter list")
     50         if not isinstance(param_groups[0], dict):
     51             param_groups = [{'params': param_groups}]

ValueError: optimizer got an empty parameter list
<Figure size 576x576 with 0 Axes>

To properly register modules, you would have to use nn.ModuleList instead of a plain Python list.
Also, you are creating bottom and end, but are not registering them as self.bottom and self.end, so you might want to fix this, too.

Thank you, it works.
I fixed bottom and end to self.bottom and self.end.

But I can’t understand how I use nn.ModuleList instead of a plain Python list.

Replace

self.down = []

with

self.down = nn.ModuleList()

as well as self.up.

I understand.
Thank you for your quick and polite response.
:bowing_man: