"""
llm_config.py 模块
LLM模型配置类（支持本地模型和API）
"""
import os
from dataclasses import dataclass, field

from dotenv import load_dotenv

load_dotenv()

@dataclass
class LLMConfig:
    """LLM configuration (supports a local DeepSeek model and the DeepSeek API).

    Defaults are read from environment variables at *instantiation* time via
    ``field(default_factory=...)`` rather than at import time, so values set
    after this module is imported (e.g. by a late ``load_dotenv()`` call or a
    test fixture mutating ``os.environ``) are still picked up. All attributes
    can also be overridden explicitly: ``LLMConfig(model_type="deepseek")``.
    """

    # Backend selector: "deepseek" (local weights) or "deepseek_api" (hosted API).
    model_type: str = field(
        default_factory=lambda: os.getenv("LLM_MODEL_TYPE", "deepseek_api")
    )

    # ==== DeepSeek API configuration ====
    # API key ("sk-..."); an empty string means "not configured".
    deepseek_api_key: str = field(
        default_factory=lambda: os.getenv("DEEPSEEK_API_KEY", "")
    )
    # Official API endpoint unless overridden via DEEPSEEK_API_BASE_URL.
    deepseek_api_base_url: str = field(
        default_factory=lambda: os.getenv(
            "DEEPSEEK_API_BASE_URL", "https://api.deepseek.com"
        )
    )
    # Sampling temperature. The getenv default is a *string* so the float()
    # conversion is applied uniformly to env values and the fallback alike.
    deepseek_api_temperature: float = field(
        default_factory=lambda: float(os.getenv("DEEPSEEK_TEMPERATURE", "0.7"))
    )
    # Maximum number of new tokens to generate per completion.
    deepseek_api_max_new_tokens: int = field(
        default_factory=lambda: int(os.getenv("DEEPSEEK_MAX_NEW_TOKENS", "1024"))
    )

    # ==== Local DeepSeek model configuration (kept for switching back) ====
    # Filesystem path to the local model weights.
    deepseek_model_path: str = field(
        default_factory=lambda: os.getenv(
            "DEEPSEEK_MODEL_PATH", "./models/llms/DeepSeek-7B-Chat"
        )
    )
    # Device placement hint for the loader ("auto", "cuda", "cpu", ...).
    deepseek_device: str = field(
        default_factory=lambda: os.getenv("DEEPSEEK_DEVICE", "auto")
    )
