Hi, I'm a newbie in PyTorch.
I'm trying to create a trainable model that applies a fixed multiplication and addition to the outputs of its sub-models, as shown below.
Here the tensor X is the training data, and var_in is an additional input tensor that varies with each set of training data and is used to multiply the outputs of the sub-models.
Below is my attempt, but I don't get a reasonable result.
Is there anything wrong here? Or is it even possible to build a model like this?
Thanks for any suggestion.
import torch
import torch.nn as nn
import torch.nn.functional as F

class Model(nn.Module):
    def __init__(self):
        super(Model, self).__init__()
        # Three identical sub-models, each mapping 3 input features to 1 output
        self.l1_1 = nn.Linear(3, 600)
        self.l2_1 = nn.Linear(600, 200)
        self.l3_1 = nn.Linear(200, 1)
        self.l1_2 = nn.Linear(3, 600)
        self.l2_2 = nn.Linear(600, 200)
        self.l3_2 = nn.Linear(200, 1)
        self.l1_3 = nn.Linear(3, 600)
        self.l2_3 = nn.Linear(600, 200)
        self.l3_3 = nn.Linear(200, 1)

    def forward(self, x, var_in):
        # Sub-model 1
        out_1 = F.relu(self.l1_1(x))
        out_1 = F.relu(self.l2_1(out_1))
        out_1 = self.l3_1(out_1)
        # Sub-model 2
        out_2 = F.relu(self.l1_2(x))
        out_2 = F.relu(self.l2_2(out_2))
        out_2 = self.l3_2(out_2)
        # Sub-model 3
        out_3 = F.relu(self.l1_3(x))
        out_3 = F.relu(self.l2_3(out_3))
        out_3 = self.l3_3(out_3)
        # Fixed combination: weight each sub-model's output by the
        # corresponding column of var_in and add them up
        Finalout = (var_in[:, 0].view(-1, 1) * out_1
                    + var_in[:, 1].view(-1, 1) * out_2
                    + var_in[:, 2].view(-1, 1) * out_3)
        return Finalout
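To make the intended combination clearer: the last step of forward is just a per-sample weighted sum of the three sub-model outputs. An equivalent, more compact way to write that step (only a sketch; the shape comments assume out_1, out_2, out_3 are each (batch, 1) and var_in is (batch, 3)) would be:

        # Equivalent version of the combination step inside forward
        outs = torch.stack([out_1, out_2, out_3], dim=-1)      # (batch, 1, 3)
        Finalout = (outs * var_in.unsqueeze(1)).sum(dim=-1)    # (batch, 1)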
The training step looks like this:

y_pred = model(TrainData_input, Var_in)   # call the model directly rather than model.forward
l = loss(y_pred, TrainData_output)
l.backward()
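For completeness, here is a minimal, self-contained sketch of how I run the training end to end, with made-up dummy data; the optimizer, loss, batch size, and learning rate are placeholders, not my real setup:

model = Model()
loss = nn.MSELoss()
optimizer = torch.optim.Adam(model.parameters(), lr=1e-3)

# Dummy data: 32 samples with 3 features each; var_in holds the 3 per-sample weights
TrainData_input = torch.randn(32, 3)
Var_in = torch.randn(32, 3)
TrainData_output = torch.randn(32, 1)

for epoch in range(100):
    optimizer.zero_grad()                     # clear gradients from the previous step
    y_pred = model(TrainData_input, Var_in)   # forward pass through all three sub-models
    l = loss(y_pred, TrainData_output)
    l.backward()                              # backprop through the weighted sum
    optimizer.step()                          # update all sub-model parameters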