I have the following class:
class Net(torch.nn.Module):
    """Fully connected network: D_in -> H -> H -> H -> D_out with Tanh
    activations, followed by an elementwise square and a final
    D_out -> D_out linear layer.
    """

    def __init__(self, D_in, H, D_out):
        # BUG FIX: the constructor must be spelled __init__ (double
        # underscores) — `def init` is just an ordinary method and never
        # runs on construction. Likewise the parent call is
        # torch.nn.Module.__init__, not .init().
        super(Net, self).__init__()
        self.fc1 = nn.Linear(D_in, H)
        self.fc2 = nn.Linear(H, H)
        self.fc3 = nn.Linear(H, H)
        self.fc4 = nn.Linear(H, D_out)
        self.active_1 = torch.nn.Tanh()
        self.active_2 = torch.nn.Tanh()
        self.active_3 = torch.nn.Tanh()
        # NOTE(review): fc11/fc22/fc33 are never used in forward(), yet
        # creating them draws values from the global RNG to initialize their
        # weights. Deleting them therefore shifts the random initialization
        # of every layer constructed after them (here fc_tot), which changes
        # training results unless seeding/ordering is controlled. Kept to
        # preserve the original state_dict layout and reproducibility.
        self.fc11 = nn.Linear(D_in, H)
        self.fc22 = nn.Linear(H, H)
        self.fc33 = nn.Linear(H, H)
        self.fc_tot = nn.Linear(D_out, D_out)

    def forward(self, x):
        """Accept an input tensor and return the network's output tensor.

        Pipeline: three Tanh-activated linear layers, a projection to
        D_out, an elementwise square, then the final linear layer.
        """
        y = self.active_1(self.fc1(x))
        y = self.active_2(self.fc2(y))
        y = self.active_3(self.fc3(y))
        y = self.fc4(y)
        y = y * y  # elementwise square of the D_out projection
        y = self.fc_tot(y)
        return y
As you can see, I do not use three of the layers (`self.fc11 = nn.Linear(D_in, H)`, `self.fc22 = nn.Linear(H, H)`, `self.fc33 = nn.Linear(H, H)`) anywhere in the `forward` method. Yet when I delete them from the class, the results change completely. Why does that happen?