# LlavaMistral1 / pyproject.toml
[build-system]
requires = ["setuptools>=61.0"]
build-backend = "setuptools.build_meta"
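# With setuptools declared as the PEP 517 build backend above, the project can be
# installed and packaged with the standard tooling. The commands below are a typical
# workflow sketch, not something this file itself specifies:
#
#   pip install -e .      # editable install from the repo root
#   pip install build
#   python -m build       # produces an sdist and a wheel under dist/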

[project]
name = "llava"
version = "1.1.0"
description = "Towards GPT-4 like large language and visual assistant."
readme = "README.md"
requires-python = ">=3.8"
classifiers = [
"Programming Language :: Python :: 3",
"License :: OSI Approved :: Apache Software License",
]
dependencies = [
"einops", "fastapi", "gradio==3.35.2", "markdown2[all]", "numpy",
"requests", "sentencepiece", "tokenizers>=0.12.1",
"torch", "torchvision", "uvicorn", "wandb",
"shortuuid", "httpx==0.24.0",
"deepspeed==0.9.5",
"peft==0.4.0",
"transformers==4.31.0",
"accelerate==0.21.0",
"bitsandbytes==0.41.0",
"scikit-learn==1.2.2",
"sentencepiece==0.1.99",
"einops==0.6.1", "einops-exts==0.0.4", "timm==0.6.13",
"gradio_client==0.2.9"
]
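# Editorial note, not from the uploaded file: gradio 3.35.2, gradio_client 0.2.9 and
# httpx 0.24.0 are tightly coupled, as are transformers 4.31.0, accelerate 0.21.0,
# peft 0.4.0 and bitsandbytes 0.41.0, so each group is best upgraded together rather
# than pin by pin.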
[project.urls]
"Homepage" = "https://llava-vl.github.io"
"Bug Tracker" = "https://github.com/haotian-liu/LLaVA/issues"

[tool.setuptools.packages.find]
exclude = ["assets*", "benchmark*", "docs", "dist*", "playground*", "scripts*", "tests*"]

# Note: `[tool.wheel]` is not a table consumed by setuptools or the wheel backend, so
# the exclude list below is effectively a no-op; the [tool.setuptools.packages.find]
# exclude above is what actually keeps these directories out of the built package.
[tool.wheel]
exclude = ["assets*", "benchmark*", "docs", "dist*", "playground*", "scripts*", "tests*"]
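
# Possible refinement (sketch only, not part of the uploaded file): training-only
# packages such as deepspeed and wandb could be moved from the main dependency list
# into an optional extra, keeping inference installs lighter, e.g.
#
#   [project.optional-dependencies]
#   train = ["deepspeed==0.9.5", "wandb"]
#
# which would then be installed with `pip install -e ".[train]"`.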