"""
ConnLLM安装脚本
"""
from setuptools import setup, find_packages

# Read the package version from the package's __init__.py.
def read_version(path="connllm/__init__.py"):
    """Return the package version string parsed from *path*.

    Scans for a line of the form ``__version__ = "0.1.0"`` (single or
    double quotes) and returns the quoted value. Falls back to "0.1.0"
    when the file is missing, unreadable, or the line is malformed.
    """
    try:
        # Explicit encoding: the default locale encoding is not guaranteed
        # to be UTF-8, and the source file may contain non-ASCII text.
        with open(path, "r", encoding="utf-8") as f:
            for line in f:
                if line.startswith("__version__"):
                    # Extract the version from a line like: __version__ = "0.1.0"
                    delim = '"' if '"' in line else "'"
                    return line.split(delim)[1]
    except (OSError, IndexError):
        # OSError: file missing/unreadable; IndexError: malformed line.
        pass
    return "0.1.0"

# Read README.md to use as the long description.
def read_readme(path="README.md"):
    """Return the long-description text read from *path* (UTF-8).

    Falls back to a short one-line description when the file is missing
    or unreadable.
    """
    try:
        with open(path, "r", encoding="utf-8") as f:
            return f.read()
    except OSError:
        # Keep installation working even without a README present.
        return "ConnLLM - 统一的LLM适配层，支持多种提供商和模型"

# Package metadata and distribution configuration. Executed at install/build
# time by pip/setuptools; this call has module-level side effects by design.
setup(
    name="connllm",
    version=read_version(),
    description="统一的LLM适配层，支持多种提供商和模型",
    long_description=read_readme(),
    long_description_content_type="text/markdown",
    author="Your Name",
    author_email="your.email@example.com",
    url="https://github.com/your-username/ConnLLM",
    packages=find_packages(),
    classifiers=[
        "Development Status :: 3 - Alpha",
        "Intended Audience :: Developers",
        "License :: OSI Approved :: MIT License",
        "Programming Language :: Python :: 3",
        "Programming Language :: Python :: 3.8",
        "Programming Language :: Python :: 3.9",
        "Programming Language :: Python :: 3.10",
        "Programming Language :: Python :: 3.11",
    ],
    python_requires=">=3.8",
    install_requires=[
        "requests>=2.25.0",
    ],
    extras_require={
        # Optional dependencies grouped by provider
        "anthropic": ["anthropic>=0.6.0"],
        "openai": ["openai>=1.0.0"],
        "openrouter": ["openai>=1.0.0"],  # OpenRouter uses the OpenAI-compatible API
        "moonshot": ["requests>=2.25.0"],  # Moonshot only needs plain HTTP requests
        "ollama": ["requests>=2.25.0"],    # Ollama only needs plain HTTP requests
        
        # Optional dependencies for specific features
        "tokenizers": ["tiktoken>=0.5.0", "anthropic>=0.6.0"],
        
        # Development dependencies
        "dev": [
            "pytest>=7.0.0",
            "black>=23.0.0",
            "isort>=5.0.0",
            "mypy>=1.0.0",
            "ruff>=0.0.100"
        ],
        
        # All optional runtime dependencies combined
        "all": [
            "anthropic>=0.6.0",
            "openai>=1.0.0",
            "tiktoken>=0.5.0",
        ],
    },
    entry_points={
        # Installs a `connllm` command that dispatches to connllm.cli:main
        "console_scripts": [
            "connllm=connllm.cli:main",
        ],
    },
)
