Update requirements.txt
Browse files — requirements.txt (+2 additions, −3 deletions)
requirements.txt
CHANGED
@@ -1,9 +1,8 @@
|
|
1 |
-
numpy==1.26.4
|
2 |
Pillow==10.1.0
|
3 |
-
flash_attn
|
4 |
torch==2.1.2
|
5 |
torchvision==0.16.2
|
6 |
-
transformers==4.40.
|
7 |
sentencepiece==0.1.99
|
|
|
8 |
gradio
|
9 |
decord
|
|
|
|
|
1 |
Pillow==10.1.0
|
|
|
2 |
torch==2.1.2
|
3 |
torchvision==0.16.2
|
4 |
+
transformers==4.40.2
|
5 |
sentencepiece==0.1.99
|
6 |
+
https://github.com/Dao-AILab/flash-attention/releases/download/v2.6.2/flash_attn-2.6.2+cu123torch2.1cxx11abiFALSE-cp310-cp310-linux_x86_64.whl
|
7 |
gradio
|
8 |
decord
|