Keep getting RuntimeError: element 0 of tensors does not require grad and does not have a grad_fn

I am trying to code up a Siamese network for sentence similarity, but I keep running into the RuntimeError in the title.

import torch
import torch.nn as nn
from torch.autograd import Variable
from tqdm import tqdm

class LSTMSentenceEncoder(nn.Module):
    def __init__(self,input_size,hidden_size,num_layers,vectors,word2idx):
        super(LSTMSentenceEncoder,self).__init__()
        self.embedding = nn.Embedding.from_pretrained(vectors,freeze=False,padding_idx=word2idx['_PAD'])
        
        self.lstm = nn.LSTM(input_size,hidden_size,num_layers,batch_first=True)
        
        self.hidden_size = hidden_size
        self.num_layers = num_layers
        self.word2idx = word2idx
        self.input_size = input_size
        
    def forward(self,x):
        if torch.cuda.is_available():
            h0 = Variable(torch.randn(self.num_layers,x.size(0),self.hidden_size),requires_grad=True).cuda()
            c0 = Variable(torch.randn(self.num_layers,x.size(0),self.hidden_size),requires_grad=True).cuda()
            
            embs = self.embedding(x)
            embs = embs.view(x.size(0),-1,self.input_size).requires_grad_().cuda()
            
        else:
            h0 = Variable(torch.randn(self.num_layers,x.size(0),self.hidden_size),requires_grad=True)
            c0 = Variable(torch.randn(self.num_layers,x.size(0),self.hidden_size),requires_grad=True)
        
            embs = self.embedding(x)
            embs = embs.view(x.size(0),-1,self.input_size)

        out,(hn,cn) = self.lstm(embs,(h0,c0))
        out = out[:, -1, :]
        return out,hn

class SiameseLSTM(nn.Module):
    def __init__(self,input_size,hidden_size,num_layers,vectors,word2idx):
        super(SiameseLSTM, self).__init__()
        self.encoder = LSTMSentenceEncoder(input_size,hidden_size,num_layers,vectors,word2idx)

    def forward(self, s1, s2):
        outputs = []
        
        for i in range(len(s1)):
            v1,h1 = self.encoder(s1[i])
            v2,h2 = self.encoder(s2[i])
            # to select the final hidden representation
            h1 = h1[-1,-1,:]
            h2 = h2[-1,-1,:]

            out = torch.exp(-torch.norm((h1 - h2), 1))
            outputs.append(out)

        return torch.tensor(outputs).cuda()

model = SiameseLSTM(input_size,hid_size,num_layers,wordEmbs,word2idx)
if torch.cuda.is_available():
    model = model.cuda()
model.float()

count = 0
seq_dim = 15
num_epochs = 5

optimizer = torch.optim.Adam(model.parameters(),lr = 0.1)
criterion = torch.nn.MSELoss()

for epoch in tqdm(range(num_epochs)):
    for i, (s1,s2, labels) in enumerate(trainLoader):
        if torch.cuda.is_available():
            s1 = Variable(s1.view(-1, seq_dim, 1).cuda())
            s2 = Variable(s2.view(-1, seq_dim, 1).cuda())
            labels = Variable(labels.cuda())
            
        else:
            s1 = Variable(s1.view(-1, seq_dim, 1),)
            s2 = Variable(s2.view(-1, seq_dim, 1),)
            labels = Variable(labels)
    
        optimizer.zero_grad()
        
        outputs = model(s1,s2)
        
        loss = criterion(outputs, labels)
        
        loss.backward()
        
        optimizer.step()
        
        count += 1

The forward pass seems to work fine, since no error is raised there, but as soon as I call loss.backward() I hit the RuntimeError above.
Can someone help me debug this? I am unable to identify what is wrong here.

The return torch.tensor(outputs).cuda() in SiameseLSTM.forward should have given you a warning:

UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).

Of course, you actually want to do something entirely different, namely you should use torch.stack there.
I would also recommend revisiting the use of Variable; it was deprecated with PyTorch 0.4 a year and a half ago.
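
Something along these lines should work for the return in SiameseLSTM.forward (a minimal sketch; the rest of your forward stays exactly as in your post):

        # torch.stack builds the batch from the graph-connected scalars,
        # so the result keeps its grad_fn and backward() can reach the LSTM.
        # torch.tensor(outputs) instead copies the values into a new leaf
        # tensor and silently detaches them, which is what triggers the error.
        # No .cuda() is needed here: the stacked tensors are already on the GPU.
        return torch.stack(outputs)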

Best regards

Thomas

Hi tom, I did not get the UserWarning you mentioned.

However, using torch.stack fixed my issue. I will also update my code to stop using Variable. Thank you so much!
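
In case it helps anyone else, here is roughly what the encoder's forward looks like without Variable (a minimal sketch of the updated code; the initial states do not need requires_grad unless you want to learn them):

    def forward(self, x):
        # plain tensors replace Variable; the device is taken from the input batch,
        # so no separate CPU/GPU branches are needed
        h0 = torch.randn(self.num_layers, x.size(0), self.hidden_size, device=x.device)
        c0 = torch.randn(self.num_layers, x.size(0), self.hidden_size, device=x.device)

        embs = self.embedding(x)
        embs = embs.view(x.size(0), -1, self.input_size)

        out, (hn, cn) = self.lstm(embs, (h0, c0))
        out = out[:, -1, :]   # output of the last time step
        return out, hn

The same goes for the training loop: s1.view(-1, seq_dim, 1).cuda() can be passed to the model directly without wrapping it in Variable.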