runtime error
A new version of the following files was downloaded from https://huggingface.co/mosaicml/mpt-7b-instruct:
- blocks.py
- warnings.py
Make sure to double-check they do not contain any added malicious code. To avoid downloading new versions of the code file, you can pin a revision.

Traceback (most recent call last):
  File "/home/user/app/app.py", line 32, in <module>
    generate = pipeline(
  File "/home/user/app/quick_pipeline.py", line 34, in __init__
    self.model = AutoModelForCausalLM.from_pretrained(
  File "/usr/local/lib/python3.10/site-packages/transformers/models/auto/auto_factory.py", line 524, in from_pretrained
    config, kwargs = AutoConfig.from_pretrained(
  File "/usr/local/lib/python3.10/site-packages/transformers/models/auto/configuration_auto.py", line 974, in from_pretrained
    config_class = get_class_from_dynamic_module(
  File "/usr/local/lib/python3.10/site-packages/transformers/dynamic_module_utils.py", line 514, in get_class_from_dynamic_module
    return get_class_in_module(class_name, final_module)
  File "/usr/local/lib/python3.10/site-packages/transformers/dynamic_module_utils.py", line 212, in get_class_in_module
    module_spec.loader.exec_module(module)
  File "<frozen importlib._bootstrap_external>", line 883, in exec_module
  File "<frozen importlib._bootstrap>", line 241, in _call_with_frames_removed
  File "/home/user/.cache/huggingface/modules/transformers_modules/mosaicml/mpt-7b-instruct/7bf8dfd6c819cdb82e2f9d0b251f79ddd33314fb/configuration_mpt.py", line 5, in <module>
    from .attention import check_alibi_support, is_flash_v1_installed, is_flash_v2_installed
  File "/home/user/.cache/huggingface/modules/transformers_modules/mosaicml/mpt-7b-instruct/7bf8dfd6c819cdb82e2f9d0b251f79ddd33314fb/attention.py", line 34, in <module>
    if is_flash_v1_installed():
  File "/home/user/.cache/huggingface/modules/transformers_modules/mosaicml/mpt-7b-instruct/7bf8dfd6c819cdb82e2f9d0b251f79ddd33314fb/attention.py", line 27, in is_flash_v1_installed
    return version.parse(flash_attn.__version__) < version.parse('2.0.0')
AttributeError: module 'flash_attn' has no attribute '__version__'
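The crash happens inside the model's remote code, before any weights load: is_flash_v1_installed() in attention.py reads flash_attn.__version__, and the flash-attn build in this container does not expose that attribute, so AutoConfig.from_pretrained raises. Below is a minimal sketch of how to check what is actually installed and how to pin the revision the warning mentions; the commit hash is copied from the cached module path in the traceback, and treating "fix or remove the flash-attn install" as the cure is an assumption, not something this log confirms.

    # Diagnostic: read the installed flash-attn version from package metadata,
    # which works even when the imported module lacks a __version__ attribute.
    from importlib.metadata import version, PackageNotFoundError

    try:
        print("flash-attn:", version("flash-attn"))
    except PackageNotFoundError:
        print("flash-attn is not installed")

    # Pin the remote code to the commit seen in the traceback, per the warning
    # above, so the Space stops pulling fresh copies of the repo's code files.
    # Note: pinning alone does not cure the AttributeError, since this is the
    # same revision that crashed; the flash-attn install still needs attention.
    from transformers import AutoModelForCausalLM

    model = AutoModelForCausalLM.from_pretrained(
        "mosaicml/mpt-7b-instruct",
        trust_remote_code=True,
        revision="7bf8dfd6c819cdb82e2f9d0b251f79ddd33314fb",
    )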