Hi, I want to ask about the difference between the following two pieces of code:
class ModelOutputs:
    """Run a forward pass through a model and capture:

    1. The network output (classifier logits).
    2. The activation of the targeted intermediate layer(s).
    3. The gradient flowing into that activation (via a backward hook).
    """

    def __init__(self, model, target_layers):
        # model: a network exposing `features` (a Sequential-like module)
        #        and `classifier` — e.g. a VGG. TODO confirm with caller.
        # target_layers: names (keys of model.features._modules) whose
        #        activations/gradients should be captured.
        self.model = model
        self.target_layers = target_layers
        self.gradients = None  # filled in by save_gradient during backward

    def save_gradient(self, grad):
        # Backward hook: stores the gradient flowing into the hooked tensor.
        # NOTE(review): if several target layers match, hooks overwrite this
        # single slot, so only the last-firing hook's gradient is kept.
        self.gradients = grad

    def __call__(self, x):
        conv_outputs = None  # stays None when no layer name matches
        # Walk the feature extractor module by module so the targeted
        # intermediate activation can be intercepted.
        for name, module in self.model.features._modules.items():
            x = module(x)
            if name in self.target_layers:
                # Hook fires during backward with d(loss)/d(activation).
                x.register_hook(self.save_gradient)
                conv_outputs = x
        # Flatten and run the classifier head.
        output = x.view(x.size(0), -1)
        output = self.model.classifier(output)
        return output, conv_outputs
I pass a VGG instance (defined below) as the `model` argument to `ModelOutputs`.
and
class VGG(nn.Module):
    """VGG classifier that also returns its last feature map and, after a
    backward pass, stores the gradient flowing into that feature map."""

    def __init__(self, vgg_name):
        super().__init__()
        # Convolutional backbone built from the named configuration.
        self.features = self._make_layers(cfg[vgg_name])
        self.classifier = nn.Linear(512, 10)
        self.gradients = None  # filled in by save_gradient during backward

    def save_gradient(self, grad):
        # Backward hook: remember d(loss)/d(feature_map).
        self.gradients = grad

    def forward(self, x):
        feature_map = self.features(x)
        # Hook fires during backward with the gradient of the feature map.
        feature_map.register_hook(self.save_gradient)
        flat = feature_map.view(feature_map.size(0), -1)
        logits = self.classifier(flat)
        return logits, feature_map
I’m curious: what is the difference between these two pieces of code in the forward and backward passes? When the loss is backpropagated, will the captured gradients differ?
Could someone help me?
Thanks in advance for your help!