from setuptools import setup, find_packages
from torch.utils.cpp_extension import BuildExtension, CUDAExtension
import os

# Locate the CUDA toolkit. An explicit CUDA_HOME environment variable always
# wins (same behavior as before); otherwise defer to torch's own detection
# (CUDA_PATH, nvcc on PATH, then /usr/local/cuda) instead of hard-pinning a
# specific toolkit version such as /usr/local/cuda-12.1, which breaks on
# machines with any other CUDA release installed.
CUDA_HOME = os.environ.get("CUDA_HOME")
if CUDA_HOME is None:
    from torch.utils.cpp_extension import CUDA_HOME as _torch_cuda_home
    CUDA_HOME = _torch_cuda_home or "/usr/local/cuda"

setup(
    name="SwiftLLM",
    version="0.0.1",
    author="Shengyu Liu",
    description="A tiny yet powerful LLM inference system tailored for researching purpose",
    # Only ship the library packages; anything outside this list (tests,
    # benchmarks, ...) is excluded from the distribution.
    packages=find_packages(include=["swiftllm", "swiftllm.server", "swiftllm.worker"]),
    ext_modules=[
        # C++/CUDA extension providing the native kernels (block swapping +
        # Python entrypoints). Compiled with nvcc/g++ via torch's BuildExtension.
        CUDAExtension(
            name="swiftllm_c",
            sources=[
                "csrc/src/block_swapping.cpp",
                "csrc/src/entrypoints.cpp",
            ],
            include_dirs=[os.path.join(CUDA_HOME, "include")],
            extra_compile_args={
                "cxx": ["-O3", "-std=c++17"],
                "nvcc": ["-O3"]
            },
        )
    ],
    # BuildExtension handles the mixed cxx/nvcc compile flags above.
    cmdclass={"build_ext": BuildExtension},
    # Native extension modules cannot be imported from a zipped egg.
    zip_safe=False,
)
