Stop updating a specific weight during training of a neural network model

While training the model defined in the code below, I want to prevent the first weight parameter (0.14) of the second layer (linear2) from being updated, while all other weights continue to update via gradient descent. How can I do this? I tried the approach shown below, but it still updates all of the model's weights.

import torch
import torch.nn as nn
import torch.nn.functional as F
import torch.optim as optim

class MyNet(nn.Module):
    def __init__(self):
        super(MyNet, self).__init__()
        self.linear1 = nn.Linear(2, 2, bias=None)
        self.linear1.weight = torch.nn.Parameter(torch.tensor([[0.11, 0.21], [0.12, 0.08]]))
        self.linear2 = nn.Linear(2, 1, bias=None)
        self.linear2.weight = torch.nn.Parameter(torch.tensor([[0.14, 0.15]]))

    def forward(self, inputs):
        out = self.linear1(inputs)
        out = self.linear2(out)
        return out

losses = []
loss_function = nn.L1Loss()
model = MyNet()
optimizer = optim.SGD(model.parameters(), lr=0.05)
input = torch.tensor([2.0, 3.0])
print('weights before backpropagation = ', list(model.parameters()))

for epoch in range(1):
    result = model(input)
    loss = loss_function(result, torch.tensor([1.00], dtype=torch.float))
    print('result = ', result)
    print("loss = ", loss)

    # my attempt: reset linear2.weight[0, 0] to its original value before the update
    with torch.no_grad():
        model.linear2.weight[0, 0] = .14

    model.zero_grad()
    loss.backward()
    print('gradients =', [x.grad.data for x in model.parameters()])
    optimizer.step()
    print('weights after backpropagation = ', list(model.parameters()))
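
A minimal sketch of one possible approach, assuming plain SGD with no momentum and no weight decay: zero out the gradient entry for that weight after loss.backward() and before optimizer.step(), so the update leaves that entry unchanged. This reuses the model, loss_function, optimizer, and input defined above and is only a sketch, not a confirmed fix.

# Sketch: relies on model, loss_function, optimizer, and input defined above.
for epoch in range(1):
    result = model(input)
    loss = loss_function(result, torch.tensor([1.00], dtype=torch.float))

    model.zero_grad()
    loss.backward()

    # Zero the gradient of the entry to freeze; with vanilla SGD
    # (no momentum, no weight decay) this keeps linear2.weight[0, 0] fixed.
    model.linear2.weight.grad[0, 0] = 0.0

    optimizer.step()

An alternative along the same lines is to register a hook on the parameter (model.linear2.weight.register_hook(...)) that zeroes that gradient entry automatically on every backward pass.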