# Python dependencies (pip requirements file)
accelerate==1.0.1
Pillow==9.3.0
decord==0.6.0
gradio==4.31.5
ninja==1.11.1.1
omegaconf==2.3.0
openai==1.14.2
pathos==0.3.2
prettytable==3.10.0
protobuf==3.20.3
pycocoevalcap==1.2
pycocotools==2.0.8
requests==2.31.0
safetensors==0.4.3
scikit-learn==1.4.1.post1
scipy==1.13.0
tiktoken==0.6.0
torch==2.1.0
torchvision==0.16.0
torchaudio==2.1.0
https://github.com/Dao-AILab/flash-attention/releases/download/v2.5.7/flash_attn-2.5.7+cu122torch2.1cxx11abiFALSE-cp310-cp310-linux_x86_64.whl
transformers==4.47.0
triton==2.1.0
func_timeout==4.3.5