Hello,
I am new to deep learning and using pytorch.
I have gone through some codes and I always see examples of weight initialization.
My questions are -
When do I initialize weights, and what is the intuition behind weight initialization for neural networks?
Does random seeding have anything to do with weight initialization?
Say I have an nn.Module placed before a pretrained model — do I need to initialize the weights of that nn.Module myself? See the example below.
Thank you for the help.
class inNet(nn.Module):
    """Small pre-activation block: BN -> ReLU -> 1x1 conv (3 -> 6 channels),
    then BN -> ReLU -> 3x3 conv (6 -> 3 channels, padding keeps spatial size),
    finished by a 2x2 max pool that halves height and width.
    """

    def __init__(self):
        super(inNet, self).__init__()
        in_channels = 3
        hidden_channels = in_channels * 2
        self.bn1 = nn.BatchNorm2d(in_channels)
        self.conv1 = nn.Conv2d(in_channels, hidden_channels,
                               kernel_size=1, bias=False)
        self.bn2 = nn.BatchNorm2d(hidden_channels)
        self.conv2 = nn.Conv2d(hidden_channels, in_channels,
                               kernel_size=3, padding=1, bias=False)
        # NOTE(review): despite the name "avg2", this is max pooling, not
        # average pooling — attribute name kept to preserve the interface.
        self.avg2 = nn.MaxPool2d(2)

    def forward(self, x):
        # Pre-activation ordering: normalize and activate before each conv.
        h = self.conv1(F.relu(self.bn1(x)))
        h = self.conv2(F.relu(self.bn2(h)))
        return self.avg2(h)
class FinalNet(nn.Module):
    """Wraps a pretrained resnet18 backbone together with two custom inNet
    blocks and a final fully-connected head.

    Fixes versus the posted snippet:
    - ``self.down_network = model()`` called the already-constructed Module
      with no input, which raises a TypeError; assign the module itself.
    - ``nn.Linear(....)`` and the ``......`` forward body were placeholders
      (syntax errors); replaced with valid code, hedged below.
    """

    def __init__(self):
        super(FinalNet, self).__init__()
        # Assign the pretrained model directly — do NOT call it here.
        # Its weights are already initialized from the pretrained checkpoint.
        self.down_network = resnet18(pretrained=True)
        # Custom modules: PyTorch layers come with sensible default
        # initializations, so explicit re-initialization is optional.
        self.up_network = inNet()
        self.final_layer = inNet()
        # NOTE(review): dimensions were a "...." placeholder in the original.
        # 1000 matches resnet18's default output width; adjust the output
        # size (10 here) to the real task before use.
        self.fc = nn.Linear(1000, 10)

    def forward(self, x):
        # NOTE(review): the original forward body was a "......" placeholder;
        # this minimal path (backbone -> fc) is illustrative. Wire in
        # up_network / final_layer as the real architecture requires.
        out = self.down_network(x)
        return self.fc(out)