I am building a parallel neural network for a PINN and want to apply transfer learning, but I do not know how to set it up. Can someone please guide me on how to use different pre-trained networks within a parallel NN?


```
class Net2_kc(nn.Module):
    """Parallel-branch MLP: five identical branches (k3..k7) whose scalar
    outputs are concatenated and mixed by a small linear head.

    Each branch maps an ``input_n``-dim input through four hidden layers of
    width ``hidden_dim`` (Swish activations) down to a single value.  Keeping
    the branches as separately named sub-modules lets each one be initialized
    from its own pre-trained checkpoint, e.g.::

        net.k3.load_state_dict(torch.load("k3_pretrained.pt"))
        for p in net.k3.parameters():
            p.requires_grad = False  # optionally freeze the transferred branch

    NOTE(review): relies on module-level globals ``input_n``, ``hidden_dim``
    and ``Swish`` being defined before instantiation — confirm they are set.
    """

    @staticmethod
    def _branch():
        # One MLP branch: input_n -> hidden_dim x4 -> 1, Swish between layers.
        # Factored out of __init__ — the original repeated this Sequential
        # five times verbatim.
        return nn.Sequential(
            nn.Linear(input_n, hidden_dim),
            Swish(),
            nn.Linear(hidden_dim, hidden_dim),
            Swish(),
            nn.Linear(hidden_dim, hidden_dim),
            Swish(),
            nn.Linear(hidden_dim, hidden_dim),
            Swish(),
            nn.Linear(hidden_dim, 1),
        )

    def __init__(self):
        super(Net2_kc, self).__init__()
        # Five structurally identical but independently parameterized
        # branches; the distinct attribute names are what allow per-branch
        # transfer learning via load_state_dict.
        self.k3 = self._branch()
        self.k4 = self._branch()
        self.k5 = self._branch()
        self.k6 = self._branch()
        self.k7 = self._branch()
        # Mixing head.  BUG FIX: in-features must equal the number of
        # concatenated branch outputs — 5, not 2 (the original Linear(2, 50)
        # would fail once five branches are concatenated).
        self.final = nn.Sequential(
            nn.Linear(5, 50),
            nn.Linear(50, 1),
        )

    def forward(self, x):
        """Run all five branches on ``x`` and mix their outputs.

        Args:
            x: tensor of shape (batch, input_n) — presumably; confirm caller.

        Returns:
            Tensor of shape (batch, 1).
        """
        k0 = self.k3(x)
        k1 = self.k4(x)
        k2 = self.k5(x)
        k3 = self.k6(x)
        # BUG FIX: original computed self.k5(x) a second time here and never
        # used self.k7 at all.
        k4 = self.k7(x)
        # BUG FIX: torch.cat takes a sequence of tensors; the original line
        # `torch.cat(k0,k1,k2,k3,k4), dim=1 )` was a syntax error.
        output = torch.cat((k0, k1, k2, k3, k4), dim=1)
        return self.final(output)
```


How can I use a separate pre-trained network for each of k3, k4, k5, k6, and k7?