class CBOWModeler(nn.Module):
    """Continuous Bag-of-Words (CBOW) model: predicts a target word from the
    concatenated embeddings of its surrounding context words.

    NOTE: the ReLU-activated 128-unit hidden layer is a deviation from the
    original word2vec paper, which projects the context representation
    directly to the vocabulary with a single linear layer; this variant
    concatenates (rather than averages) the context embeddings and inserts
    an extra hidden layer.
    """

    def __init__(self, vocab_size, embedding_dim, context_size):
        super().__init__()
        # Stored so forward() can flatten inputs of any batch size.
        self.context_size = context_size
        self.embedding_dim = embedding_dim
        self.embeddings = nn.Embedding(vocab_size, embedding_dim)
        self.linear1 = nn.Linear(context_size * embedding_dim, 128)
        self.linear2 = nn.Linear(128, vocab_size)

    def forward(self, x):
        """Return log-probabilities over the vocabulary.

        x: LongTensor of context word indices — shape (context_size,) for a
        single example, or (batch, context_size) for a batch.
        Returns: log-probabilities of shape (batch, vocab_size); batch is 1
        for a single unbatched example.
        """
        # view(-1, C*D) generalizes the original hard-coded view(1, -1):
        # identical result for a single example, and also correct for
        # batched input, which view(1, -1) would silently mis-shape.
        embeds = self.embeddings(x).view(-1, self.context_size * self.embedding_dim)
        hidden = F.relu(self.linear1(embeds))
        logits = self.linear2(hidden)
        return F.log_softmax(logits, dim=1)
According to the original word2vec paper, there should be only one linear (projection) layer, but this code has two — can anyone explain why the extra hidden layer was added?