# This file was autogenerated by uv via the following command:
#    uv pip compile pyproject.toml -o requirements.txt
accelerate==1.0.0
    # via shap-e (pyproject.toml)
aiofiles==23.2.1
    # via gradio
annotated-types==0.7.0
    # via pydantic
anyio==4.6.0
    # via
    #   gradio
    #   httpx
    #   starlette
certifi==2024.8.30
    # via
    #   httpcore
    #   httpx
    #   requests
charset-normalizer==3.4.0
    # via requests
click==8.1.7
    # via
    #   typer
    #   uvicorn
diffusers==0.30.3
    # via shap-e (pyproject.toml)
exceptiongroup==1.2.2
    # via anyio
fastapi==0.115.0
    # via gradio
ffmpy==0.4.0
    # via gradio
filelock==3.16.1
    # via
    #   diffusers
    #   huggingface-hub
    #   torch
    #   transformers
    #   triton
fsspec==2024.9.0
    # via
    #   gradio-client
    #   huggingface-hub
    #   torch
gradio==5.0.1
    # via
    #   shap-e (pyproject.toml)
    #   spaces
gradio-client==1.4.0
    # via gradio
h11==0.14.0
    # via
    #   httpcore
    #   uvicorn
httpcore==1.0.6
    # via httpx
httpx==0.27.2
    # via
    #   gradio
    #   gradio-client
    #   spaces
huggingface-hub==0.25.2
    # via
    #   accelerate
    #   diffusers
    #   gradio
    #   gradio-client
    #   tokenizers
    #   transformers
idna==3.10
    # via
    #   anyio
    #   httpx
    #   requests
importlib-metadata==8.5.0
    # via diffusers
jinja2==3.1.4
    # via
    #   gradio
    #   torch
markdown-it-py==3.0.0
    # via rich
markupsafe==2.1.5
    # via
    #   gradio
    #   jinja2
mdurl==0.1.2
    # via markdown-it-py
mpmath==1.3.0
    # via sympy
networkx==3.3
    # via torch
numpy==1.26.4
    # via
    #   accelerate
    #   diffusers
    #   gradio
    #   pandas
    #   torchvision
    #   transformers
    #   trimesh
nvidia-cublas-cu12==12.1.3.1
    # via
    #   nvidia-cudnn-cu12
    #   nvidia-cusolver-cu12
    #   torch
nvidia-cuda-cupti-cu12==12.1.105
    # via torch
nvidia-cuda-nvrtc-cu12==12.1.105
    # via torch
nvidia-cuda-runtime-cu12==12.1.105
    # via torch
nvidia-cudnn-cu12==9.1.0.70
    # via torch
nvidia-cufft-cu12==11.0.2.54
    # via torch
nvidia-curand-cu12==10.3.2.106
    # via torch
nvidia-cusolver-cu12==11.4.5.107
    # via torch
nvidia-cusparse-cu12==12.1.0.106
    # via
    #   nvidia-cusolver-cu12
    #   torch
nvidia-nccl-cu12==2.20.5
    # via torch
nvidia-nvjitlink-cu12==12.6.77
    # via
    #   nvidia-cusolver-cu12
    #   nvidia-cusparse-cu12
nvidia-nvtx-cu12==12.1.105
    # via torch
orjson==3.10.7
    # via gradio
packaging==24.1
    # via
    #   accelerate
    #   gradio
    #   gradio-client
    #   huggingface-hub
    #   spaces
    #   transformers
pandas==2.2.3
    # via gradio
pillow==10.4.0
    # via
    #   diffusers
    #   gradio
    #   torchvision
psutil==5.9.8
    # via
    #   accelerate
    #   spaces
pydantic==2.9.2
    # via
    #   fastapi
    #   gradio
    #   spaces
pydantic-core==2.23.4
    # via pydantic
pydub==0.25.1
    # via gradio
pygments==2.18.0
    # via rich
python-dateutil==2.9.0.post0
    # via pandas
python-multipart==0.0.12
    # via gradio
pytz==2024.2
    # via pandas
pyyaml==6.0.2
    # via
    #   accelerate
    #   gradio
    #   huggingface-hub
    #   transformers
regex==2024.9.11
    # via
    #   diffusers
    #   transformers
requests==2.32.3
    # via
    #   diffusers
    #   huggingface-hub
    #   spaces
    #   transformers
rich==13.9.2
    # via typer
ruff==0.6.9
    # via gradio
safetensors==0.4.5
    # via
    #   accelerate
    #   diffusers
    #   transformers
semantic-version==2.10.0
    # via gradio
shellingham==1.5.4
    # via typer
six==1.16.0
    # via python-dateutil
sniffio==1.3.1
    # via
    #   anyio
    #   httpx
spaces==0.30.3
    # via shap-e (pyproject.toml)
starlette==0.38.6
    # via fastapi
sympy==1.13.3
    # via torch
tokenizers==0.20.0
    # via transformers
tomlkit==0.12.0
    # via gradio
torch==2.4.0
    # via
    #   shap-e (pyproject.toml)
    #   accelerate
    #   torchvision
torchvision==0.19.0
    # via shap-e (pyproject.toml)
tqdm==4.66.5
    # via
    #   huggingface-hub
    #   transformers
transformers==4.45.2
    # via shap-e (pyproject.toml)
trimesh==4.4.9
    # via shap-e (pyproject.toml)
triton==3.0.0
    # via torch
typer==0.12.5
    # via gradio
typing-extensions==4.12.2
    # via
    #   anyio
    #   fastapi
    #   gradio
    #   gradio-client
    #   huggingface-hub
    #   pydantic
    #   pydantic-core
    #   rich
    #   spaces
    #   torch
    #   typer
    #   uvicorn
tzdata==2024.2
    # via pandas
urllib3==2.2.3
    # via requests
uvicorn==0.31.0
    # via gradio
websockets==12.0
    # via gradio-client
zipp==3.20.2
    # via importlib-metadata