I have a network such as:
class NeuralNet(nn.Module):
    """Two-layer fully connected network: 10 -> 5 -> 2, with ReLU after each layer."""

    def __init__(self):
        super().__init__()  # zero-arg super() is the modern Python 3 idiom
        self.fc1 = nn.Linear(10, 5)
        self.fc2 = nn.Linear(5, 2)

    def forward(self, x):
        # x: (batch, 10) -> (batch, 5) -> (batch, 2)
        x = nn.functional.relu(self.fc1(x))
        x = nn.functional.relu(self.fc2(x))
        return x
How can I compute the loss if I want to split the network up? For example:
class Layer1(nn.Module):
    """First stage of the split network: Linear(10, 5) followed by ReLU."""

    def __init__(self):
        # BUG FIX: the original called super(NeuralNet, self).__init__(),
        # naming the wrong class; zero-arg super() resolves to Layer1 correctly.
        super().__init__()
        self.fc1 = nn.Linear(10, 5)

    def forward(self, x):
        # x: (batch, 10) -> (batch, 5)
        # BUG FIX: the original line was missing a closing parenthesis.
        return nn.functional.relu(self.fc1(x))
class Layer2(nn.Module):
    """Second stage of the split network: Linear(5, 2) followed by ReLU."""

    def __init__(self):
        # BUG FIX: the original called super(NeuralNet, self).__init__(),
        # naming the wrong class; zero-arg super() resolves to Layer2 correctly.
        super().__init__()
        self.fc2 = nn.Linear(5, 2)

    def forward(self, x):
        # x: (batch, 5) -> (batch, 2)
        # BUG FIX: the original line was missing a closing parenthesis.
        return nn.functional.relu(self.fc2(x))
The only loss comes from the final layer, where the output is compared to a label.
How can I call .backward() so the gradient flows through both modules?