# MobiLlama / pyproject.toml
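
# Build backend declaration (PEP 517/518). setuptools>=61.0 is the first
# release that understands the PEP 621 [project] metadata table used below.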
[build-system]
requires = ["setuptools>=61.0"]
build-backend = "setuptools.build_meta"
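
# Core package metadata (PEP 621), read by pip and other build frontends.
# The package name "fschat" and the URLs further down come from the upstream
# FastChat project (lm-sys/fastchat).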
[project]
name = "fschat"
version = "0.2.36"
description = "An open platform for training, serving, and evaluating large language model based chatbots."
readme = "README.md"
requires-python = ">=3.8"
classifiers = [
"Programming Language :: Python :: 3",
"License :: OSI Approved :: Apache Software License",
]
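
# Runtime dependencies installed with the base package.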
dependencies = [
"aiohttp", "fastapi", "httpx", "markdown2[all]", "nh3", "numpy",
"prompt_toolkit>=3.0.0", "pydantic", "psutil", "requests", "rich>=10.0.0",
"shortuuid", "tiktoken", "uvicorn",
]
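
# Optional extras, installed on demand. For example (illustrative):
#   pip install "fschat[model_worker,webui]"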
[project.optional-dependencies]
model_worker = ["accelerate>=0.21", "peft", "sentencepiece", "torch", "transformers>=4.31.0", "protobuf"]
webui = ["gradio>=4.10"]
train = ["einops", "flash-attn>=2.0", "wandb"]
llm_judge = ["openai<1", "anthropic>=0.3", "ray"]
dev = ["black==23.3.0", "pylint==2.8.2"]
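
# Project links surfaced on the package index page.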
[project.urls]
"Homepage" = "https://github.com/lm-sys/fastchat"
"Bug Tracker" = "https://github.com/lm-sys/fastchat/issues"
[tool.setuptools.packages.find]
exclude = ["assets*", "benchmark*", "docs", "dist*", "playground*", "scripts*", "tests*"]
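
# Repeats the exclude list, presumably to keep the same directories out of
# built wheels.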
[tool.wheel]
exclude = ["assets*", "benchmark*", "docs", "dist*", "playground*", "scripts*", "tests*"]
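
# Example (illustrative) of a local editable install with development extras:
#   pip install -e ".[dev]"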