Thank you so much for your patient guidance! I tried this code, and it reports an error like this:
Traceback (most recent call last):
File "/home/mitc/pycharm-2017.3.3/helpers/pydev/pydevd.py", line 1668, in <module>
main()
File "/home/mitc/pycharm-2017.3.3/helpers/pydev/pydevd.py", line 1662, in main
globals = debugger.run(setup['file'], None, None, is_module)
File "/home/mitc/pycharm-2017.3.3/helpers/pydev/pydevd.py", line 1072, in run
pydev_imports.execfile(file, globals, locals) # execute the script
File "/home/mitc/lcy/Pytorch-SR/nd_vdsr_unfold.py", line 137, in <module>
], lr=0.1, weight_decay=0.0001)
File "/home/mitc/anaconda2/envs/lcy-pytorch/lib/python2.7/site-packages/torch/optim/adam.py", line 28, in __init__
super(Adam, self).__init__(params, defaults)
File "/home/mitc/anaconda2/envs/lcy-pytorch/lib/python2.7/site-packages/torch/optim/optimizer.py", line 61, in __init__
raise ValueError("can't optimize a non-leaf Variable")
ValueError: can't optimize a non-leaf Variable
My code is:
class Net(nn.Module):
    """3-D CNN with a residual connection: stacked pre-activation blocks
    (BatchNorm3d -> ReLU -> Conv3d), finishing with `out + residual`.

    NOTE(review): the original post elides intermediate layers with "....".
    Only layer11 and layer21 are shown, yet forward() also calls
    self.layer22 — the omitted section must define it (and any further
    layers). TODO confirm against the full source.
    """

    def __init__(self):  # 1, 3, 11, 13, 1
        super(Net, self).__init__()
        # Pre-activation block: BN (affine=False, so no learnable BN
        # parameters) -> ReLU -> 3x3x3 conv with "same" padding.
        self.layer11 = nn.Sequential(
            nn.BatchNorm3d(num_features=1, momentum=0.999, affine=False),
            nn.ReLU(inplace=True),
            nn.Conv3d(in_channels=1, out_channels=16,
                      kernel_size=(3, 3, 3), padding=(1, 1, 1), bias=True))
        self.layer21 = nn.Sequential(
            nn.BatchNorm3d(num_features=16, momentum=0.999, affine=False),
            nn.ReLU(inplace=True),
            nn.Conv3d(in_channels=16, out_channels=16,
                      kernel_size=(3, 3, 3), padding=(1, 1, 1), bias=True))
        # ... further layers (layer22, ...) omitted in the original post.

    def forward(self, x, residual):
        """Run the conv stack over x and add the caller-supplied residual."""
        # residual = x1  (original commented-out line, kept for context)
        out = self.layer11(x)
        out = self.layer21(out)
        out = self.layer22(out)  # defined in the omitted layers above
        # ... omitted intermediate layers ...
        out = torch.add(out, residual)
        return out
if __name__ == "__main__":
    net = Net()
    # Per-parameter-group Adam: the conv weight uses the global lr (0.1),
    # the bias overrides it with lr=0.01; weight_decay=0.0001 applies to
    # both groups.
    # NOTE(review): the reported "can't optimize a non-leaf Variable" is
    # raised by torch/optim/optimizer.py when an entry in these groups is
    # not a leaf Variable (e.g. the result of an operation such as .cuda()
    # applied to a Variable, rather than a module Parameter). Verify that
    # nothing reassigns net.layer11[2].weight / .bias before this point —
    # as written here, accessing the Parameters directly should be a leaf.
    optimizer = optim.Adam([
        {'params': net.layer11[2].weight},
        {'params': net.layer11[2].bias, 'lr': 0.01},
    ], lr=0.1, weight_decay=0.0001)
    # ... training loop omitted in the original post.