Hi,
How can I freeze the initial layers of the network so that only the last two layers are trained? Do I need to add a filter in the optimizer to exclude those frozen layers? If so, how?
class Net_k(nn.Module):
    """Fully-connected network: input_n -> 10 hidden layers of width h_nk -> 1.

    Every Linear layer (including the final 1-unit output layer) is followed
    by a Swish activation, exactly as in the original hand-written stack.
    """

    def __init__(self):
        super(Net_k, self).__init__()
        # Build the layer list programmatically instead of writing the ten
        # identical hidden blocks out by hand.  nn.Sequential(*layers) names
        # children "0", "1", ... just like the literal form, so state_dict
        # keys are unchanged.
        layers = [nn.Linear(input_n, h_nk), Swish()]
        for _ in range(9):  # 9 more hidden Linear(h_nk, h_nk) blocks (10 hidden layers total)
            layers += [nn.Linear(h_nk, h_nk), Swish()]
        layers += [nn.Linear(h_nk, 1), Swish()]
        # NOTE(review): a Swish after the final 1-unit layer constrains the
        # output range — confirm this is intended and not a leftover.
        self.main = nn.Sequential(*layers)

    def forward(self, x):
        """Run x through the sequential stack and return the (N, 1) output."""
        output = self.main(x)
        return output
# Adam over all parameters of net_k.  To freeze the initial layers and train
# only the last two, set p.requires_grad = False on the frozen parameters
# (autograd then skips them) and/or pass only the trainable ones here, e.g.
# optim.Adam(filter(lambda p: p.requires_grad, net_k.parameters()), ...).
optimizer_k = optim.Adam(
    net_k.parameters(),
    lr=learning_rate,
    betas=(0.9, 0.99),
    eps=10 ** -15,  # kept as the original expression to preserve the exact value
)