I am trying to execute the following code with torch 2.5 and transformers 4.44:
import torch
from transformers import AutoTokenizer, AutoModelForCausalLM

@torch.compile(backend="inductor")
def test_gpt2_demo():
    tokenizer = AutoTokenizer.from_pretrained("gpt2")
    model = AutoModelForCausalLM.from_pretrained("gpt2").to(torch.float)

    prompt = "Thanks for"
    print("\nInput prompt: ", prompt, "\n")

    # run on CPU
    input_ids = tokenizer(prompt, return_tensors="pt").input_ids
    gen_tokens = model.generate(
        input_ids,
        max_length=4,
        do_sample=False,
        pad_token_id=tokenizer.eos_token_id,
    )
    gen_text = tokenizer.batch_decode(gen_tokens)[0]
    print("CPU output: ", gen_text, "\n")

test_gpt2_demo()
However, I am getting the following error:
File "/local/mnt/workspace/users/vpandya/pytorch-upstream/install/local/lib/python3.11/dist-packages/torch/_dynamo/variables/constant.py", line 44, in create
assert not isinstance(value, disallowed_type), reason
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
AssertionError: Dict types must use ConstDictVariable.
Can someone please help me understand what's going wrong?
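From the traceback it looks like Dynamo is tracing through AutoTokenizer.from_pretrained and AutoModelForCausalLM.from_pretrained themselves, since everything (loading, tokenization, generation) sits inside the compiled function. Is that expected to work, or should torch.compile only wrap the model? For reference, this is the variant I would try next, where only the model's forward pass is compiled (just my assumption, I have not verified that it avoids the error):

import torch
from transformers import AutoTokenizer, AutoModelForCausalLM

# Load tokenizer and model eagerly, outside of any compiled region.
tokenizer = AutoTokenizer.from_pretrained("gpt2")
model = AutoModelForCausalLM.from_pretrained("gpt2").to(torch.float)

# Compile only the model's forward pass; generate() then calls the compiled forward.
model.forward = torch.compile(model.forward, backend="inductor")

prompt = "Thanks for"
input_ids = tokenizer(prompt, return_tensors="pt").input_ids
gen_tokens = model.generate(
    input_ids,
    max_length=4,
    do_sample=False,
    pad_token_id=tokenizer.eos_token_id,
)
print("CPU output: ", tokenizer.batch_decode(gen_tokens)[0])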
Complete error log:
/local/mnt/workspace/users/vpandya/pytorch-upstream/install/local/lib/python3.11/dist-packages/torch/_dynamo/variables/functions.py:729: UserWarning: Graph break due to unsupported builtin posix.stat. This function is either a Python builtin (e.g. _warnings.warn) or a third-party C/C++ Python extension (perhaps created with pybind). If it is a Python builtin, please file an issue on GitHub so the PyTorch team can add support for it and see the next case for a workaround. If it is a third-party C/C++ Python extension, please either wrap it into a PyTorch-understood custom operator (see https://pytorch.org/tutorials/advanced/custom_ops_landing_page.html for more details) or, if it is traceable, use torch.compiler.allow_in_graph.
torch._dynamo.utils.warn_once(msg)
/usr/local/lib/python3.11/dist-packages/transformers/tokenization_utils_base.py:1601: FutureWarning: `clean_up_tokenization_spaces` was not set. It will be set to `True` by default. This behavior will be depracted in transformers v4.45, and will be then set to `False` by default. For more details check this issue: https://github.com/huggingface/transformers/issues/31884
warnings.warn(
W0905 14:42:05.444000 89071 torch/_dynamo/convert_frame.py:834] [6/8] torch._dynamo hit config.cache_size_limit (8)
W0905 14:42:05.444000 89071 torch/_dynamo/convert_frame.py:834] [6/8] function: '__init_subclass__' (/usr/local/lib/python3.11/dist-packages/transformers/utils/generic.py:324)
W0905 14:42:05.444000 89071 torch/_dynamo/convert_frame.py:834] [6/8] last reason: 6/0: ___check_obj_id(L['cls'], 164468560)
W0905 14:42:05.444000 89071 torch/_dynamo/convert_frame.py:834] [6/8] To log all recompilation reasons, use TORCH_LOGS="recompiles".
W0905 14:42:05.444000 89071 torch/_dynamo/convert_frame.py:834] [6/8] To diagnose recompilation issues, see https://pytorch.org/docs/main/torch.compiler_troubleshooting.html.
/local/mnt/workspace/users/vpandya/pytorch-upstream/install/local/lib/python3.11/dist-packages/torch/_dynamo/variables/functions.py:729: UserWarning: Graph break due to unsupported builtin None.safe_open.__new__. This function is either a Python builtin (e.g. _warnings.warn) or a third-party C/C++ Python extension (perhaps created with pybind). If it is a Python builtin, please file an issue on GitHub so the PyTorch team can add support for it and see the next case for a workaround. If it is a third-party C/C++ Python extension, please either wrap it into a PyTorch-understood custom operator (see https://pytorch.org/tutorials/advanced/custom_ops_landing_page.html for more details) or, if it is traceable, use torch.compiler.allow_in_graph.
torch._dynamo.utils.warn_once(msg)
Traceback (most recent call last):
File "/local/mnt/workspace/users/vpandya/simple_gpt2_triton_cpu.py", line 25, in <module>
test_gpt2_demo()
File "/local/mnt/workspace/users/vpandya/pytorch-upstream/install/local/lib/python3.11/dist-packages/torch/_dynamo/eval_frame.py", line 465, in _fn
return fn(*args, **kwargs)
^^^^^^^^^^^^^^^^^^^
File "/local/mnt/workspace/users/vpandya/simple_gpt2_triton_cpu.py", line 6, in test_gpt2_demo
@torch.compile(backend="inductor")
File "/local/mnt/workspace/users/vpandya/simple_gpt2_triton_cpu.py", line 8, in torch_dynamo_resume_in_test_gpt2_demo_at_8
tokenizer = AutoTokenizer.from_pretrained("gpt2")
File "/usr/local/lib/python3.11/dist-packages/transformers/models/auto/auto_factory.py", line 564, in from_pretrained
return model_class.from_pretrained(
^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/usr/local/lib/python3.11/dist-packages/transformers/modeling_utils.py", line 2905, in from_pretrained
@classmethod
File "/usr/local/lib/python3.11/dist-packages/transformers/modeling_utils.py", line 3223, in torch_dynamo_resume_in_from_pretrained_at_3223
if not isinstance(config, PretrainedConfig):
File "/usr/local/lib/python3.11/dist-packages/transformers/modeling_utils.py", line 3330, in torch_dynamo_resume_in_from_pretrained_at_3330
if not isinstance(config, PretrainedConfig):
File "/usr/local/lib/python3.11/dist-packages/transformers/modeling_utils.py", line 3355, in torch_dynamo_resume_in_from_pretrained_at_3355
config = copy.deepcopy(config)
File "/usr/local/lib/python3.11/dist-packages/transformers/modeling_utils.py", line 3409, in torch_dynamo_resume_in_from_pretrained_at_3409
is_local = os.path.isdir(pretrained_model_name_or_path)
File "/usr/local/lib/python3.11/dist-packages/transformers/modeling_utils.py", line 3487, in torch_dynamo_resume_in_from_pretrained_at_3487
elif os.path.isfile(os.path.join(subfolder, pretrained_model_name_or_path)):
File "/usr/local/lib/python3.11/dist-packages/transformers/modeling_utils.py", line 3490, in torch_dynamo_resume_in_from_pretrained_at_3490
elif os.path.isfile(os.path.join(subfolder, pretrained_model_name_or_path + ".index")):
File "/usr/local/lib/python3.11/dist-packages/transformers/modeling_utils.py", line 3815, in torch_dynamo_resume_in_from_pretrained_at_3498
# make sure we use the model's config since the __init__ call might have copied it
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/usr/local/lib/python3.11/dist-packages/transformers/models/gpt2/modeling_gpt2.py", line 1190, in __init__
self.transformer = GPT2Model(config)
^^^^^^^^^^^^^^^^^
File "/usr/local/lib/python3.11/dist-packages/transformers/models/gpt2/modeling_gpt2.py", line 911, in __init__
self.post_init()
File "/local/mnt/workspace/users/vpandya/pytorch-upstream/install/local/lib/python3.11/dist-packages/torch/_dynamo/convert_frame.py", line 1244, in __call__
return self._torchdynamo_orig_callable(
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/local/mnt/workspace/users/vpandya/pytorch-upstream/install/local/lib/python3.11/dist-packages/torch/_dynamo/convert_frame.py", line 1045, in __call__
result = self._inner_convert(
^^^^^^^^^^^^^^^^^^^^
File "/local/mnt/workspace/users/vpandya/pytorch-upstream/install/local/lib/python3.11/dist-packages/torch/_dynamo/convert_frame.py", line 516, in __call__
return _compile(
^^^^^^^^^
File "/local/mnt/workspace/users/vpandya/pytorch-upstream/install/local/lib/python3.11/dist-packages/torch/_dynamo/convert_frame.py", line 908, in _compile
guarded_code = compile_inner(code, one_graph, hooks, transform)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/local/mnt/workspace/users/vpandya/pytorch-upstream/install/local/lib/python3.11/dist-packages/torch/_dynamo/convert_frame.py", line 656, in compile_inner
return _compile_inner(code, one_graph, hooks, transform)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/local/mnt/workspace/users/vpandya/pytorch-upstream/install/local/lib/python3.11/dist-packages/torch/_utils_internal.py", line 87, in wrapper_function
return function(*args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^
File "/local/mnt/workspace/users/vpandya/pytorch-upstream/install/local/lib/python3.11/dist-packages/torch/_dynamo/convert_frame.py", line 689, in _compile_inner
out_code = transform_code_object(code, transform)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/local/mnt/workspace/users/vpandya/pytorch-upstream/install/local/lib/python3.11/dist-packages/torch/_dynamo/bytecode_transformation.py", line 1322, in transform_code_object
transformations(instructions, code_options)
File "/local/mnt/workspace/users/vpandya/pytorch-upstream/install/local/lib/python3.11/dist-packages/torch/_dynamo/convert_frame.py", line 210, in _fn
return fn(*args, **kwargs)
^^^^^^^^^^^^^^^^^^^
File "/local/mnt/workspace/users/vpandya/pytorch-upstream/install/local/lib/python3.11/dist-packages/torch/_dynamo/convert_frame.py", line 624, in transform
tracer.run()
File "/local/mnt/workspace/users/vpandya/pytorch-upstream/install/local/lib/python3.11/dist-packages/torch/_dynamo/symbolic_convert.py", line 2796, in run
super().run()
File "/local/mnt/workspace/users/vpandya/pytorch-upstream/install/local/lib/python3.11/dist-packages/torch/_dynamo/symbolic_convert.py", line 983, in run
while self.step():
^^^^^^^^^^^
File "/local/mnt/workspace/users/vpandya/pytorch-upstream/install/local/lib/python3.11/dist-packages/torch/_dynamo/symbolic_convert.py", line 895, in step
self.dispatch_table[inst.opcode](self, inst)
File "/local/mnt/workspace/users/vpandya/pytorch-upstream/install/local/lib/python3.11/dist-packages/torch/_dynamo/symbolic_convert.py", line 582, in wrapper
return inner_fn(self, inst)
^^^^^^^^^^^^^^^^^^^^
File "/local/mnt/workspace/users/vpandya/pytorch-upstream/install/local/lib/python3.11/dist-packages/torch/_dynamo/symbolic_convert.py", line 2279, in CALL
self._call(inst)
File "/local/mnt/workspace/users/vpandya/pytorch-upstream/install/local/lib/python3.11/dist-packages/torch/_dynamo/symbolic_convert.py", line 2273, in _call
self.call_function(fn, args, kwargs)
File "/local/mnt/workspace/users/vpandya/pytorch-upstream/install/local/lib/python3.11/dist-packages/torch/_dynamo/symbolic_convert.py", line 830, in call_function
self.push(fn.call_function(self, args, kwargs)) # type: ignore[arg-type]
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/local/mnt/workspace/users/vpandya/pytorch-upstream/install/local/lib/python3.11/dist-packages/torch/_dynamo/variables/functions.py", line 383, in call_function
return super().call_function(tx, args, kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/local/mnt/workspace/users/vpandya/pytorch-upstream/install/local/lib/python3.11/dist-packages/torch/_dynamo/variables/functions.py", line 322, in call_function
return super().call_function(tx, args, kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/local/mnt/workspace/users/vpandya/pytorch-upstream/install/local/lib/python3.11/dist-packages/torch/_dynamo/variables/functions.py", line 106, in call_function
return tx.inline_user_function_return(self, [*self.self_args(), *args], kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/local/mnt/workspace/users/vpandya/pytorch-upstream/install/local/lib/python3.11/dist-packages/torch/_dynamo/symbolic_convert.py", line 836, in inline_user_function_return
return InliningInstructionTranslator.inline_call(self, fn, args, kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/local/mnt/workspace/users/vpandya/pytorch-upstream/install/local/lib/python3.11/dist-packages/torch/_dynamo/symbolic_convert.py", line 3011, in inline_call
return cls.inline_call_(parent, func, args, kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/local/mnt/workspace/users/vpandya/pytorch-upstream/install/local/lib/python3.11/dist-packages/torch/_dynamo/symbolic_convert.py", line 3139, in inline_call_
tracer.run()
File "/local/mnt/workspace/users/vpandya/pytorch-upstream/install/local/lib/python3.11/dist-packages/torch/_dynamo/symbolic_convert.py", line 983, in run
while self.step():
^^^^^^^^^^^
File "/local/mnt/workspace/users/vpandya/pytorch-upstream/install/local/lib/python3.11/dist-packages/torch/_dynamo/symbolic_convert.py", line 895, in step
self.dispatch_table[inst.opcode](self, inst)
File "/local/mnt/workspace/users/vpandya/pytorch-upstream/install/local/lib/python3.11/dist-packages/torch/_dynamo/symbolic_convert.py", line 1744, in LOAD_ATTR
self._load_attr(inst)
File "/local/mnt/workspace/users/vpandya/pytorch-upstream/install/local/lib/python3.11/dist-packages/torch/_dynamo/symbolic_convert.py", line 1734, in _load_attr
result = BuiltinVariable(getattr).call_function(
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/local/mnt/workspace/users/vpandya/pytorch-upstream/install/local/lib/python3.11/dist-packages/torch/_dynamo/variables/builtin.py", line 982, in call_function
return handler(tx, args, kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^
File "/local/mnt/workspace/users/vpandya/pytorch-upstream/install/local/lib/python3.11/dist-packages/torch/_dynamo/variables/builtin.py", line 726, in <lambda>
return lambda tx, args, kwargs: obj.call_function(
^^^^^^^^^^^^^^^^^^
File "/local/mnt/workspace/users/vpandya/pytorch-upstream/install/local/lib/python3.11/dist-packages/torch/_dynamo/variables/builtin.py", line 982, in call_function
return handler(tx, args, kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^
File "/local/mnt/workspace/users/vpandya/pytorch-upstream/install/local/lib/python3.11/dist-packages/torch/_dynamo/variables/builtin.py", line 863, in builtin_dispatch
rv = fn(tx, args, kwargs)
^^^^^^^^^^^^^^^^^^^^
File "/local/mnt/workspace/users/vpandya/pytorch-upstream/install/local/lib/python3.11/dist-packages/torch/_dynamo/variables/builtin.py", line 781, in call_self_handler
result = self_handler(tx, *args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/local/mnt/workspace/users/vpandya/pytorch-upstream/install/local/lib/python3.11/dist-packages/torch/_dynamo/variables/builtin.py", line 1769, in call_getattr
return obj.var_getattr(tx, name)
^^^^^^^^^^^^^^^^^^^^^^^^^
File "/local/mnt/workspace/users/vpandya/pytorch-upstream/install/local/lib/python3.11/dist-packages/torch/_dynamo/variables/dicts.py", line 919, in var_getattr
return ConstantVariable.create(getattr(self.obj, name))
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/local/mnt/workspace/users/vpandya/pytorch-upstream/install/local/lib/python3.11/dist-packages/torch/_dynamo/variables/constant.py", line 44, in create
assert not isinstance(value, disallowed_type), reason
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
AssertionError: Dict types must use ConstDictVariable.
from user code:
File "/usr/local/lib/python3.11/dist-packages/transformers/modeling_utils.py", line 1381, in post_init
self.init_weights()
File "/usr/local/lib/python3.11/dist-packages/transformers/modeling_utils.py", line 2294, in init_weights
if self.config.pruned_heads:
Set TORCH_LOGS="+dynamo" and TORCHDYNAMO_VERBOSE=1 for more information
You can suppress this exception and fall back to eager by setting:
import torch._dynamo
torch._dynamo.config.suppress_errors = True