Set weights for embedding layer

Hello, I tried to initialize the weights of an embedding layer with my own pretrained embeddings, using the _create_emb_layer method below.

I am confused about why the weights change after the model is initialized.

import torch
import torch.nn as nn

class clf(nn.Module):

    def __init__(self, weight_matrix):
        super(clf, self).__init__()
        self.embedding, self.vocal_size, self.embed_dim = self._create_emb_layer(weight_matrix, trainable=False)
        print('original matrix:', weight_matrix[0])
        print('after init matrix:', self.embedding.weight.detach().numpy()[0])

    def _create_emb_layer(self, weight_matrix, trainable=False):
        num_embeddings, embedding_dim = weight_matrix.shape
        emb_layer = nn.Embedding(num_embeddings, embedding_dim)
        emb_layer.weights = torch.nn.Parameter(torch.from_numpy(weight_matrix))
        if trainable:
            emb_layer.weight.requires_grad = True
        else:
            emb_layer.weight.requires_grad = False
        return emb_layer, num_embeddings, embedding_dim

Found the problem: I set emb_layer.weights instead of emb_layer.weight. I discovered it by printing out the state_dict.
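For anyone hitting the same thing, here is a minimal sketch of what goes wrong and how to fix it. The small random NumPy matrix is just a placeholder for the real pretrained embeddings; the rest uses standard PyTorch APIs (nn.Parameter, state_dict, nn.Embedding.from_pretrained).

import numpy as np
import torch
import torch.nn as nn

# Placeholder for the real pretrained embedding matrix (assumption: float32 NumPy array).
weight_matrix = np.random.rand(10, 4).astype(np.float32)

# Reproducing the typo: assigning to .weights registers a *new* parameter
# next to the layer's real .weight, which stays randomly initialized.
buggy = nn.Embedding(*weight_matrix.shape)
buggy.weights = nn.Parameter(torch.from_numpy(weight_matrix))
print(buggy.state_dict().keys())  # odict_keys(['weight', 'weights']) -- the extra key gives the typo away

# Corrected helper: assign to the existing .weight parameter.
def create_emb_layer(weight_matrix, trainable=False):
    num_embeddings, embedding_dim = weight_matrix.shape
    emb_layer = nn.Embedding(num_embeddings, embedding_dim)
    emb_layer.weight = nn.Parameter(torch.from_numpy(weight_matrix))
    emb_layer.weight.requires_grad = trainable
    return emb_layer, num_embeddings, embedding_dim

# Alternatively, from_pretrained copies and freezes the weights in one call.
emb_layer = nn.Embedding.from_pretrained(torch.from_numpy(weight_matrix), freeze=True)

Printing state_dict().keys() is a quick sanity check here, because only registered parameters show up, and the stray 'weights' entry makes the typo visible immediately.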
