# Python dependencies (pip requirements file).
# Install with: pip install -r requirements.txt
accelerate==1.10.1
einops==0.8.1
# NOTE(review): this prebuilt wheel targets torch 2.6 (see "cu12torch2.6" in the
# filename) but torch==2.7.0 is pinned below — likely ABI mismatch at import time.
# Confirm and either pin torch to 2.6.x (with torchvision 0.21.x) or use a
# flash-attn release that ships a cp310 torch2.7 wheel.
flash-attn @ https://github.com/Dao-AILab/flash-attention/releases/download/v2.7.4.post1/flash_attn-2.7.4.post1+cu12torch2.6cxx11abiTRUE-cp310-cp310-linux_x86_64.whl
hf-transfer==0.1.9
hf-xet==1.1.8
huggingface-hub==0.34.4
imageio==2.37.0
imageio-ffmpeg==0.6.0
safetensors==0.6.2
sentencepiece==0.2.1
torch==2.7.0
torchao==0.12.0
torchvision==0.22.0
transformers==4.55.4