I tried the stackoverflow and other threads in forum but still my issues wasn’t resolved. I am a starter please help me understand what went wrong.
id_2_token = dict(enumerate(set(n for name in names for n in name),1))
token_2_id = {value:key for key,value in id_2_token.items()}
print(len(id_2_token))
print(len(token_2_id))
Output :
56
56
feature_id,target_id = batch_maker(names) #batching function
print(feature_id.shape) #Shape - [124,64,17]
#RNN MODEL
class CharMaker(nn.Module):
def __init__(self, input_size, hidden_size, output_size,n_layers=1):
super(CharMaker,self).__init__()
self.input_size = input_size
self.hidden_size = hidden_size
self.output_size = output_size
self.encoder = nn.Embedding(self.input_size, self.hidden_size)
self.rnn = nn.RNN(self.hidden_size,self.hidden_size, num_layers=1,batch_first=True)
self.linear = nn.Linear(self.hidden_size, self.output_size)
self.softmax = torch.nn.Softmax(dim=output_size)
def forward(self, inputs, hidden):
batch_size = inputs.size(0)
if hidden == None:
hidden = torch.zeros(1,inputs.size(1),self.hidden_size)
print(inputs.shape)
encoded = self.encoder(inputs)
output, hidden = self.rnn(encoded, hidden)
outout = self.linear(hidden,self.output_size)
output = self.softmax(output)
return output,hidden
Initializing my model
cm = CharMaker(input_size=len(token_2_id),hidden_size=20,output_size=len(token_2_id))
Reshaping and Texting The Data
hidden = None
names_id_tensor = torch.from_numpy(features_id[0])
names_id_tensor = names_id_tensor.reshape(names_id_tensor.shape[0],names_id_tensor.shape[1],1)
Shapes
print(names_id_tensor.shape) #torch.Size([64, 17, 1])
output,hidden = cm(names_id_tensor,hidden)
Error:
---------------------------------------------------------------------------
IndexError Traceback (most recent call last)
<ipython-input-139-d0d9f66f3192> in <module>
----> 1 output,hidden = cm(names_id_tensor,hidden)
~/.local/lib/python3.6/site-packages/torch/nn/modules/module.py in __call__(self, *input, **kwargs)
548 result = self._slow_forward(*input, **kwargs)
549 else:
--> 550 result = self.forward(*input, **kwargs)
551 for hook in self._forward_hooks.values():
552 hook_result = hook(self, input, result)
<ipython-input-129-f8a6cdd31a7a> in forward(self, inputs, hidden)
19 hidden = torch.zeros(1,inputs.size(1),self.hidden_size)
20 print(inputs.shape)
---> 21 encoded = self.encoder(inputs)
22 output, hidden = self.rnn(encoded, hidden)
23 outout = self.linear(hidden,self.output_size)
~/.local/lib/python3.6/site-packages/torch/nn/modules/module.py in __call__(self, *input, **kwargs)
548 result = self._slow_forward(*input, **kwargs)
549 else:
--> 550 result = self.forward(*input, **kwargs)
551 for hook in self._forward_hooks.values():
552 hook_result = hook(self, input, result)
~/.local/lib/python3.6/site-packages/torch/nn/modules/sparse.py in forward(self, input)
112 return F.embedding(
113 input, self.weight, self.padding_idx, self.max_norm,
--> 114 self.norm_type, self.scale_grad_by_freq, self.sparse)
115
116 def extra_repr(self):
~/.local/lib/python3.6/site-packages/torch/nn/functional.py in embedding(input, weight, padding_idx, max_norm, norm_type, scale_grad_by_freq, sparse)
1722 # remove once script supports set_grad_enabled
1723 _no_grad_embedding_renorm_(weight, input, max_norm, norm_type)
-> 1724 return torch.embedding(weight, input, padding_idx, scale_grad_by_freq, sparse)
1725
1726
IndexError: index out of range in self