I was trying to write a model class that builds a network with a variable number of layers, depending on how many the user specifies. I saw in this tutorial (https://github.com/MorvanZhou/PyTorch-Tutorial/blob/master/tutorial-contents/504_batch_normalization.py) that setattr can be used to register the layers. Is it strictly necessary?
For example:
import torch
import torch.nn as nn
from torch.autograd import Variable

class MyNet(nn.Module):
    # variable number of conv layers followed by one fully connected layer
    def __init__(self, C, H, W, Fs, Ks, FC):
        super(MyNet, self).__init__()
        self.nb_conv_layers = len(Fs)
        ''' Initialize Conv layers '''
        self.convs = []  # plain Python list holding the conv layers
        out = Variable(torch.FloatTensor(1, C, H, W))  # dummy input to infer the flattened size
        in_channels = C
        for i in range(self.nb_conv_layers):
            F, K = Fs[i], Ks[i]
            conv = nn.Conv2d(in_channels, F, K)  # (in_channels, out_channels, kernel_size)
            self.convs.append(conv)
            in_channels = F
            out = conv(out)
        ''' Initialize FC layers '''
        CHW = out.numel()
        self.fc = nn.Linear(CHW, FC)
versus:
class MyNet(nn.Module):
    # variable number of conv layers followed by one fully connected layer
    def __init__(self, C, H, W, Fs, Ks, FC):
        super(MyNet, self).__init__()
        self.nb_conv_layers = len(Fs)
        ''' Initialize Conv layers '''
        self.convs = []
        out = Variable(torch.FloatTensor(1, C, H, W))  # dummy input to infer the flattened size
        in_channels = C
        for i in range(self.nb_conv_layers):
            F, K = Fs[i], Ks[i]
            conv = nn.Conv2d(in_channels, F, K)  # (in_channels, out_channels, kernel_size)
            setattr(self, f'conv{i}', conv)  # register the layer as an attribute of the module
            self._set_init(conv)  # weight-init helper, defined elsewhere in the class as in the linked tutorial
            self.convs.append(conv)
            in_channels = F
            out = conv(out)
        ''' Initialize FC layers '''
        CHW = out.numel()
        self.fc = nn.Linear(CHW, FC)
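
In case a concrete comparison point helps: below is a minimal sketch (my own variant, not from the tutorial) with the same constructor signature, but storing the layers in nn.ModuleList, the container torch.nn provides for a variable number of submodules. The class name MyNetModuleList is just a placeholder, and torch.zeros plays the role of the Variable dummy above.

import torch
import torch.nn as nn

class MyNetModuleList(nn.Module):
    # same idea, but the conv layers live in an nn.ModuleList so they are
    # registered as submodules (their parameters appear in model.parameters())
    def __init__(self, C, H, W, Fs, Ks, FC):
        super(MyNetModuleList, self).__init__()
        self.nb_conv_layers = len(Fs)
        self.convs = nn.ModuleList()
        out = torch.zeros(1, C, H, W)  # dummy input to infer the flattened size
        in_channels = C
        for F, K in zip(Fs, Ks):
            conv = nn.Conv2d(in_channels, F, K)  # (in_channels, out_channels, kernel_size)
            self.convs.append(conv)
            in_channels = F
            out = conv(out)
        ''' Initialize FC layers '''
        CHW = out.numel()
        self.fc = nn.Linear(CHW, FC)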