Hi there, need your help!
I was using some older transformers code, and it had been working fine for the last 2 months until it suddenly started giving this error in a place that never changed:
AssertionError Traceback (most recent call last)
<ipython-input-31-c365f437b895> in <module>()
9 tokenizer = tokenizer_class.from_pretrained(args['model_name'])
10
---> 11 model = model_class.from_pretrained(args['model_name'])
12 model.to(device);
13
3 frames
/usr/local/lib/python3.6/dist-packages/pytorch_transformers/modeling_utils.py in from_pretrained(cls, pretrained_model_name_or_path, *model_args, **kwargs)
534
535 # Instantiate model.
--> 536 model = cls(config, *model_args, **model_kwargs)
537
538 if state_dict is None and not from_tf:
/usr/local/lib/python3.6/dist-packages/pytorch_transformers/modeling_xlm.py in __init__(self, config)
842 self.num_labels = config.num_labels
843
--> 844 self.transformer = XLMModel(config)
845 self.sequence_summary = SequenceSummary(config)
846
/usr/local/lib/python3.6/dist-packages/pytorch_transformers/modeling_xlm.py in __init__(self, config)
543 if config.n_langs > 1 and config.use_lang_emb:
544 self.lang_embeddings = nn.Embedding(self.n_langs, self.dim)
--> 545 self.embeddings = nn.Embedding(self.n_words, self.dim, padding_idx=self.pad_index)
546 self.layer_norm_emb = nn.LayerNorm(self.dim, eps=config.layer_norm_eps)
547
/usr/local/lib/python3.6/dist-packages/torch/nn/modules/sparse.py in __init__(self, num_embeddings, embedding_dim, padding_idx, max_norm, norm_type, scale_grad_by_freq, sparse, _weight)
86 if padding_idx is not None:
87 if padding_idx > 0:
---> 88 assert padding_idx < self.num_embeddings, 'Padding_idx must be within num_embeddings'
89 elif padding_idx < 0:
90 assert padding_idx >= -self.num_embeddings, 'Padding_idx must be within num_embeddings'
AssertionError: Padding_idx must be within num_embeddings