ariG23498 HF Staff committed on
Commit
6a76e63
·
verified ·
1 Parent(s): 56d36af

Upload zai-org_GLM-ASR-Nano-2512_1.txt with huggingface_hub

Browse files
Files changed (1) hide show
  1. zai-org_GLM-ASR-Nano-2512_1.txt +3 -3
zai-org_GLM-ASR-Nano-2512_1.txt CHANGED
@@ -6,7 +6,7 @@ model = AutoModelForCausalLM.from_pretrained("zai-org/GLM-ASR-Nano-2512", trust_
6
 
7
  ERROR:
8
  Traceback (most recent call last):
9
- File "/tmp/zai-org_GLM-ASR-Nano-2512_19B7Ulz.py", line 25, in <module>
10
  model = AutoModelForCausalLM.from_pretrained("zai-org/GLM-ASR-Nano-2512", trust_remote_code=True, dtype="auto")
11
  File "/tmp/.cache/uv/environments-v2/1f148c168d860d2b/lib/python3.13/site-packages/transformers/models/auto/auto_factory.py", line 586, in from_pretrained
12
  model_class = get_class_from_dynamic_module(
@@ -19,8 +19,8 @@ Traceback (most recent call last):
19
  ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^
20
  File "<frozen importlib._bootstrap_external>", line 1026, in exec_module
21
  File "<frozen importlib._bootstrap>", line 488, in _call_with_frames_removed
22
- File "/tmp/.cache/huggingface/modules/transformers_modules/zai_hyphen_org/GLM_hyphen_ASR_hyphen_Nano_hyphen_2512/179a99d189eee664e623e504186538c683deb0cc/modeling_glmasr.py", line 9, in <module>
23
  from .modeling_audio import WhisperSpecialEncoder
24
- File "/tmp/.cache/huggingface/modules/transformers_modules/zai_hyphen_org/GLM_hyphen_ASR_hyphen_Nano_hyphen_2512/179a99d189eee664e623e504186538c683deb0cc/modeling_audio.py", line 7, in <module>
25
  from transformers.models.whisper.modeling_whisper import WhisperEncoder, WhisperEncoderLayer, WhisperFlashAttention2
26
  ImportError: cannot import name 'WhisperFlashAttention2' from 'transformers.models.whisper.modeling_whisper' (/tmp/.cache/uv/environments-v2/1f148c168d860d2b/lib/python3.13/site-packages/transformers/models/whisper/modeling_whisper.py)
 
6
 
7
  ERROR:
8
  Traceback (most recent call last):
9
+ File "/tmp/zai-org_GLM-ASR-Nano-2512_18xyiyT.py", line 25, in <module>
10
  model = AutoModelForCausalLM.from_pretrained("zai-org/GLM-ASR-Nano-2512", trust_remote_code=True, dtype="auto")
11
  File "/tmp/.cache/uv/environments-v2/1f148c168d860d2b/lib/python3.13/site-packages/transformers/models/auto/auto_factory.py", line 586, in from_pretrained
12
  model_class = get_class_from_dynamic_module(
 
19
  ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^
20
  File "<frozen importlib._bootstrap_external>", line 1026, in exec_module
21
  File "<frozen importlib._bootstrap>", line 488, in _call_with_frames_removed
22
+ File "/tmp/.cache/huggingface/modules/transformers_modules/zai_hyphen_org/GLM_hyphen_ASR_hyphen_Nano_hyphen_2512/91967eab799804ab256a3819a085b92378906eb2/modeling_glmasr.py", line 9, in <module>
23
  from .modeling_audio import WhisperSpecialEncoder
24
+ File "/tmp/.cache/huggingface/modules/transformers_modules/zai_hyphen_org/GLM_hyphen_ASR_hyphen_Nano_hyphen_2512/91967eab799804ab256a3819a085b92378906eb2/modeling_audio.py", line 7, in <module>
25
  from transformers.models.whisper.modeling_whisper import WhisperEncoder, WhisperEncoderLayer, WhisperFlashAttention2
26
  ImportError: cannot import name 'WhisperFlashAttention2' from 'transformers.models.whisper.modeling_whisper' (/tmp/.cache/uv/environments-v2/1f148c168d860d2b/lib/python3.13/site-packages/transformers/models/whisper/modeling_whisper.py)