I have a network that uses a class called my_conv. Inside this class there is a convolution, and I want to set the learning rate of that convolution to 0.01, while the learning rate of all other layers stays at 0.0001.
I have tried the code below, but it raises the following error:
param_group['params'] = list(params)
TypeError: 'NoneType' object is not iterable
How should I set a specific learning rate for one particular layer?
This is my code
import torch
import torch.nn as nn
import torch.optim as optim
from torch.autograd import Variable
class my_conv(nn.Module):
    """Thin wrapper module around a single bias-free 3x3 convolution.

    Keeps spatial size unchanged (stride=1, padding=1).  Because the conv
    is built with bias=False, ``self.conv.bias`` is ``None``.
    """

    def __init__(self, in_channels, out_channels):
        super(my_conv, self).__init__()
        self.conv = nn.Conv2d(
            in_channels,
            out_channels,
            kernel_size=3,
            stride=1,
            padding=1,
            groups=1,
            bias=False,
        )

    def forward(self, x):
        # Apply the wrapped convolution and return its output directly.
        return self.conv(x)
class Net(nn.Module):
    """Two-stage network: the custom conv wrapper followed by a plain Conv2d.

    Maps a 1-channel input to 32 channels (via my_conv) and back down to
    1 channel; both convs are bias-free and preserve spatial size.
    """

    def __init__(self):
        super(Net, self).__init__()
        self.customer_conv = my_conv(1, 32)
        self.traditional_conv = nn.Conv2d(
            32, 1, kernel_size=3, stride=1, padding=1, bias=False
        )

    def forward(self, x):
        out = self.customer_conv(x)
        out = self.traditional_conv(out)
        return out
if __name__=="__main__":
net = Net()
optimizer = optim.Adam(
[{'params': net.customer_conv.conv.weight, 'lr': 0.01}, {'params': net.customer_conv.conv.bias, 'lr': 0.01}],
lr=0.001, weight_decay=0.0001)
batch_size, channel, height, width = 1, 1, 4, 4
x = torch.randint(1, 100, (batch_size,channel, height, width)).float()
out = net(Variable(x))
print (out.size())
out.backward()
optimizer.step()
print("weight", net.customer_conv.conv.weight.data.numpy(), "grad", net.customer_conv.conv.weight.grad.data.numpy())
print("bias", net.customer_conv.conv.bias.data.numpy(), "grad", net.customer_conv.conv.grad.data.numpy())