Optuna unfreeze layers

Hello, I'm running into the following issue when freezing layers inside the objective function: the layers remain with requires_grad = True:

import optuna
import torch
import torch.nn as nn
from torchvision import models


def freeze_layer(model, layer_instance, layer_name, layer_numbers, freeze=True):
    # Flip requires_grad on every parameter of each module of type
    # layer_instance whose qualified name starts with e.g. 'layer4'.
    for name, layer in model.named_modules():
        for layer_number in layer_numbers:
            layer_id = '%s%d' % (layer_name, layer_number)
            if isinstance(layer, layer_instance) and name.startswith(layer_id):
                for param in layer.parameters():
                    param.requires_grad_(not freeze)

    return model
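
For reference, here is a minimal standalone check of the helper outside Optuna (a sketch, assuming torchvision is installed and the imports above):

# Sketch: verify that freeze_layer actually flips requires_grad on layer4's convs.
m = models.resnet18(weights="DEFAULT")
m = freeze_layer(m, nn.Conv2d, 'layer', [4], freeze=True)

frozen = [n for n, p in m.named_parameters() if not p.requires_grad]
print(len(frozen), 'frozen parameter tensors, e.g.', frozen[:3])

Note that because the helper matches on nn.Conv2d, the BatchNorm parameters inside those blocks stay trainable; matching on nn.Sequential instead (each ResNet stage like layer4 is an nn.Sequential) would freeze every parameter of the stage.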


def objective(trial):

    # Generate the model.
    base_model = models.resnet18(weights="DEFAULT")

    # Replace the classification head for a 10-class problem.
    num_ftrs = base_model.fc.in_features
    base_model.fc = nn.Linear(num_ftrs, 10)

    # Let Optuna choose how many of the last residual stages to freeze.
    layers2freeze = trial.suggest_categorical("freeze_layers", ['A', 'B', 'C'])
    map_layers2freeze = dict(A=[4], B=[4, 3], C=[4, 3, 2], D=[4, 3, 2, 1])
    layersF = map_layers2freeze[layers2freeze]

    model = freeze_layer(base_model, torch.nn.Conv2d, 'layer', layersF, True)

    # Inspect which parameters actually got frozen.
    for name, param in model.named_parameters():
        print('Name:', name, 'Requires_Grad:', param.requires_grad)
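
The rest of the objective is omitted above; for context, a typical continuation would pass only the still-trainable parameters to the optimizer (a sketch: the optimizer type and learning rate below are placeholders, not from my actual code):

# Hypothetical continuation: build the optimizer over trainable parameters only,
# so frozen layers are excluded from weight updates.
trainable_params = [p for p in model.parameters() if p.requires_grad]
optimizer = torch.optim.SGD(trainable_params, lr=1e-3, momentum=0.9)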