I am facing the following error -
TypeError: only integer tensors of a single element can be converted to an index
In the following code. I am even doing a forceful conversion to int datatype and yet I am facing this problem. It occurs when I try to access specific embedding layers.
userEmbeddings = self.userEmbeds[userIndex]
The complete code is:
class EmbeddingModel(nn.Module):
def __init__(self,userC,movieC,embedDim):
super(EmbeddingModel, self).__init__()
self.userEmbeds = nn.Embedding(userC, embedDim)
self.movieEmbeds = nn.Embedding(movieC, embedDim)
self.fc1 = nn.Linear(2*embedDim, embedDim)
self.relu = nn.ReLU()
self.fc2 = nn.Linear(embedDim, 5)
def forward(self, userIndex, movieIndex):
userIndex = userIndex.to(dtype = torch.int, device = device)
movieIndex = movieIndex.to(dtype = torch.int, device = device)
print(userIndex.type(),movieIndex.type())
# ----- Output - torch.cuda.IntTensor torch.cuda.IntTensor
# Error occurs in the following line --------
userEmbeddings = self.userEmbeds[userIndex]
movieEmbeddings = self.movieEmbeds[movieIndex]
inp = torch.cat([userEmbeddings,movieEmbeddings],1)
out = self.fc1(inp)
out = self.relu(out)
out = self.fc2(out)
return out
The following is my calling code for the model -
device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")
for epoch in range(epochs):
for i, (users,movies,ratings) in enumerate(train_loader):
if torch.cuda.is_available():
users = Variable(users.cuda())
movies = Variable(movies.cuda())
ratings = Variable(ratings.cuda())
else:
users = Variable(users)
movies = Variable(movies)
ratings = Variable(ratings)
outputs = model(users,movies)
How do I index my models embedding layer this way?