How to update parameters with multiple loss functions

Hi everyone, I'm new to PyTorch and I've run into some problems updating parameters while training a model. For instance, I have two loss functions, loss1 and loss2, and let loss = loss1 + loss2. The model has feature-extractor layers and classifier layers. I want to update the parameters in the feature-extractor layers using loss, and the parameters in the classifier layers using loss1. How can I do that in PyTorch? Thanks!

@Khah_Nguyen you will have to manually turn requires_grad on and off around the backward passes. This will ensure that when the optimizer's step() function runs, the appropriate gradients are stored on each parameter.

import numpy as np
import time
import torch
import torch.nn as nn
import torch.nn.functional as F
import torch.optim as optim
from torch.autograd import Variable

class random_model(nn.Module):
    """Toy two-stage network: a feature extractor (``model1``) followed by a
    linear classifier head (``model3``).

    Note: ``num_layers`` is accepted for interface compatibility with existing
    callers but is not used by this implementation.
    """

    def __init__(self, num_layers):
        super().__init__()  # modern no-arg super(), same behavior
        # Feature extractor: 100 -> 20, with batch norm and ReLU.
        self.model1 = nn.Sequential(nn.Linear(100, 20), nn.BatchNorm1d(20), nn.ReLU())
        # Classifier head: 20 -> 1.
        self.model3 = nn.Linear(20, 1)

    def forward(self, x):
        """Map a (batch, 100) input to a (batch, 1) output."""
        x = self.model1(x)
        x = self.model3(x)
        return x
# Model, losses, and optimizer.
model = random_model(10)
loss1 = torch.nn.MSELoss()
loss2 = torch.nn.SmoothL1Loss()
optimizer = optim.Adam(model.parameters(), lr=0.001)

# Random training data. The target must be shaped (100, 1) to match the
# model output; a (100,) target would silently broadcast against the
# (100, 1) output inside MSELoss and compute a 100x100 elementwise loss.
X = torch.rand(100, 100)
y = torch.rand(100, 1)

# Training loop implementing the two-loss scheme:
#   - feature extractor (model1) is updated with grad(loss1 + loss2)
#   - classifier head (model3) is updated with grad(loss1) only
# requires_grad must be set BEFORE the forward pass: autograd records which
# parameters need gradients when the graph is built, so toggling it after the
# forward (as the original code did) has no effect. The original loop also
# never called backward(), zero_grad(), or optimizer.step(), so no parameter
# was ever updated.
# NOTE(review): the second forward pass updates BatchNorm running stats a
# second time per epoch — acceptable for this toy example.
for cur_epoch in range(100):
    optimizer.zero_grad()

    # Pass 1: freeze the classifier, then backprop loss2 so its gradient
    # accumulates only into the feature extractor.
    for param in model.model3.parameters():
        param.requires_grad = False
    output = model(X)
    cur_loss2 = loss2(output, y)
    cur_loss2.backward()

    # Pass 2: unfreeze the classifier, then backprop loss1 so its gradient
    # accumulates into both the feature extractor and the classifier.
    for param in model.model3.parameters():
        param.requires_grad = True
    output = model(X)
    cur_loss1 = loss1(output, y)
    cur_loss1.backward()

    # Apply the accumulated gradients: model1 sees grad(loss1 + loss2),
    # model3 sees grad(loss1) only.
    optimizer.step()

    cur_loss = cur_loss1 + cur_loss2
    print("Epoch {0} Loss is {1}".format(cur_epoch, cur_loss.item()))

OK, I got it. Thanks!