JeffreyXiang
committed
Commit 58b6042
Parent(s): 06a0af5
Update requirements.txt
requirements.txt CHANGED (+1 -1)
@@ -19,9 +19,9 @@ pymeshfix==0.17.0
 igraph==0.11.8
 git+https://github.com/EasternJournalist/utils3d.git@9a4eb15e4021b67b12c460c7057d642626897ec8
 xformers==0.0.27.post2
-https://github.com/Dao-AILab/flash-attention/releases/download/v2.7.0.post2/flash_attn-2.7.0.post2+cu12torch2.4cxx11abiFALSE-cp310-cp310-linux_x86_64.whl
 kaolin==0.17.0
 spconv-cu120==2.3.6
 transformers==4.46.3
+https://github.com/Dao-AILab/flash-attention/releases/download/v2.7.0.post2/flash_attn-2.7.0.post2+cu12torch2.4cxx11abiFALSE-cp310-cp310-linux_x86_64.whl
 https://huggingface.co/spaces/JeffreyXiang/TRELLIS/resolve/main/wheels/diff_gaussian_rasterization-0.0.0-cp310-cp310-linux_x86_64.whl?download=true
 https://huggingface.co/spaces/JeffreyXiang/TRELLIS/resolve/main/wheels/nvdiffrast-0.3.3-py3-none-any.whl?download=true