"""
LLM应用配置管理器
"""

import os
import yaml
import logging
from dataclasses import dataclass, field
from pathlib import Path
from typing import Dict, Any, Optional, List

# Try to import python-dotenv; skip .env loading if it is not installed
try:
    from dotenv import load_dotenv
    _dotenv_available = True
except ImportError:
    _dotenv_available = False

logger = logging.getLogger(__name__)


@dataclass
class ServerConfig:
    """HTTP server settings."""
    host: str = "0.0.0.0"  # bind address; "0.0.0.0" listens on all interfaces
    port: int = 8002       # TCP port to listen on (validated to 1-65535)
    debug: bool = False    # enable debug mode


@dataclass
class VolcengineConfig:
    """Volcengine (Ark) API access settings."""
    api_key: str = ""  # API key; normally injected from env var VOLCENGINE_LLM_API_KEY
    api_url: str = "https://ark.cn-beijing.volces.com"  # API base URL


@dataclass
class ModelConfig:
    """LLM model selection."""
    name: str = "doubao-seed-1-6-250615"  # model identifier


@dataclass
class ThinkingConfig:
    """Reasoning ("thinking") mode settings."""
    type: str = "disabled"  # one of: "enabled", "disabled", "auto"


@dataclass
class ParametersConfig:
    """LLM inference/sampling parameters."""
    temperature: float = 0.7        # sampling temperature
    top_p: float = 0.95             # nucleus-sampling probability mass
    max_tokens: int = 4096          # maximum tokens to generate
    stream: bool = True             # stream the response incrementally
    thinking: ThinkingConfig = field(default_factory=ThinkingConfig)  # reasoning-mode settings
    frequency_penalty: float = 0.0  # frequency penalty
    presence_penalty: float = 0.0   # presence penalty


@dataclass
class LLMConfig:
    """LLM provider, model and inference settings."""
    volcengine: VolcengineConfig = field(default_factory=VolcengineConfig)  # provider credentials/endpoint
    model: ModelConfig = field(default_factory=ModelConfig)                 # model selection
    parameters: ParametersConfig = field(default_factory=ParametersConfig)  # sampling parameters
    timeout: int = 60        # request timeout (presumably seconds — TODO confirm); must be > 0
    system_prompt: str = ""  # system prompt text


@dataclass
class ToolConfig:
    """Per-tool settings for function calling."""
    enabled: bool = True   # whether this tool is enabled
    description: str = ""  # human-readable tool description


@dataclass
class FunctionCallingConfig:
    """Function-calling settings."""
    enabled: bool = True  # master switch for function calling
    tools: Dict[str, ToolConfig] = field(default_factory=dict)  # tool name -> tool settings


@dataclass
class MCPServerConfig:
    """Settings for a single MCP server entry."""
    enabled: bool = False                 # whether this server should be used
    command: Optional[str] = None         # launch command (presumably for process-based servers)
    args: Optional[List[str]] = None      # arguments for the launch command
    env: Optional[Dict[str, str]] = None  # extra environment variables passed to the server
    host: Optional[str] = None            # host (presumably for network-based servers)
    port: Optional[int] = None            # port (presumably for network-based servers)


@dataclass
class MCPConfig:
    """MCP (Model Context Protocol) settings."""
    enabled: bool = True  # master switch for MCP
    servers: Dict[str, MCPServerConfig] = field(default_factory=dict)  # server name -> server settings


@dataclass
class LoggingConfig:
    """Logging settings."""
    level: str = "INFO"  # standard logging level name
    format: str = "%(asctime)s - %(name)s - %(levelname)s - %(message)s"  # logging format string


@dataclass
class ConversationConfig:
    """Conversation/history settings."""
    max_history: int = 20        # maximum number of history entries kept
    save_to_file: bool = False   # persist conversation history to disk
    history_file: str = "conversations/chat_history.json"  # path of the history file


@dataclass
class GimbalConfig:
    """Pan/tilt gimbal settings."""
    enabled: bool = False            # whether gimbal control is enabled
    serial_port: str = "/dev/ttyS1"  # serial device used for the gimbal
    baudrate: int = 115200           # serial baud rate
    pan_pin: int = 3                 # pin driving the pan axis
    tilt_pin: int = 4                # pin driving the tilt axis
    init_pan_angle: float = 135.0    # initial pan angle (presumably degrees)
    init_tilt_angle: float = 135.0   # initial tilt angle (presumably degrees)
    angle_range: List[int] = field(default_factory=lambda: [0, 270])  # [min, max] allowed angle
    supported_pins: List[int] = field(default_factory=lambda: [2, 3, 4, 6, 44, 45, 46, 7, 10])  # pins accepted for gimbal axes


@dataclass
class ArmControlConfig:
    """Robot-arm control settings."""
    enabled: bool = False            # whether arm control is enabled
    serial_port: str = "/dev/ttyS1"  # serial device used for the arm
    baudrate: int = 115200           # serial baud rate


@dataclass
class VLMConfig:
    """Vision-language model (VLM) API settings."""
    enabled: bool = False  # whether VLM features are enabled
    api_key: str = ""      # API key; normally injected from env var SILICONFLOW_API_KEY
    api_url: str = "https://api.siliconflow.cn/v1/chat/completions"  # chat-completions endpoint
    model_name: str = "Qwen/Qwen2-VL-72B-Instruct"  # VLM model identifier
    timeout: int = 120  # request timeout (presumably seconds — TODO confirm)
    max_image_size: int = 10 * 1024 * 1024  # maximum accepted image size in bytes (10 MB)
    supported_formats: List[str] = field(default_factory=lambda: ["png", "jpg", "jpeg", "gif", "webp"])  # accepted image formats


@dataclass
class AppConfig:
    """Top-level application configuration aggregating all sections."""
    server: ServerConfig = field(default_factory=ServerConfig)                          # HTTP server
    llm: LLMConfig = field(default_factory=LLMConfig)                                   # LLM provider/model
    function_calling: FunctionCallingConfig = field(default_factory=FunctionCallingConfig)  # function-calling tools
    mcp: MCPConfig = field(default_factory=MCPConfig)                                   # MCP servers
    gimbal: GimbalConfig = field(default_factory=GimbalConfig)                          # pan/tilt gimbal
    arm_control: ArmControlConfig = field(default_factory=ArmControlConfig)             # robot arm
    vlm: VLMConfig = field(default_factory=VLMConfig)                                   # vision-language model
    logging: LoggingConfig = field(default_factory=LoggingConfig)                       # logging
    conversation: ConversationConfig = field(default_factory=ConversationConfig)        # conversation history


class ConfigManager:
    """配置管理器"""
    
    def __init__(self, config_file: Optional[str] = None):
        self.config_file = config_file or "config/llm_config.yaml"
        self.config: Optional[AppConfig] = None
        self._load_dotenv()

    def _load_dotenv(self):
        """加载.env文件"""
        if _dotenv_available:
            env_file = Path(".env")
            if env_file.exists():
                load_dotenv(env_file)
                logger.info(f"已加载环境变量文件: {env_file}")
            else:
                logger.debug("未找到.env文件，跳过加载")
        else:
            logger.debug("python-dotenv未安装，跳过.env文件加载")

    def load_config(self) -> AppConfig:
        """加载配置文件"""
        config_path = Path(self.config_file)
        
        if not config_path.exists():
            logger.warning(f"配置文件不存在: {config_path}，使用默认配置")
            self.config = AppConfig()
            self._save_default_config(config_path)
            return self.config
        
        try:
            with open(config_path, 'r', encoding='utf-8') as f:
                config_data = yaml.safe_load(f)
            
            self.config = self._parse_config(config_data)
            self._load_env_variables()
            self._validate_config()
            
            logger.info(f"配置文件加载成功: {config_path}")
            return self.config
            
        except Exception as e:
            logger.error(f"加载配置文件失败: {e}，使用默认配置")
            self.config = AppConfig()
            return self.config
    
    def _parse_config(self, config_data: Dict[str, Any]) -> AppConfig:
        """解析配置数据"""
        config = AppConfig()
        
        # 解析服务器配置
        if 'server' in config_data:
            server_data = config_data['server']
            config.server = ServerConfig(
                host=server_data.get('host', config.server.host),
                port=server_data.get('port', config.server.port),
                debug=server_data.get('debug', config.server.debug)
            )
        
        # 解析LLM配置
        if 'llm' in config_data:
            llm_data = config_data['llm']
            
            # 火山引擎配置
            volcengine_data = llm_data.get('volcengine', {})
            volcengine_config = VolcengineConfig(
                api_key=volcengine_data.get('api_key', ''),
                api_url=volcengine_data.get('api_url', 'https://ark.cn-beijing.volces.com')
            )
            
            # 模型配置
            model_data = llm_data.get('model', {})
            model_config = ModelConfig(
                name=model_data.get('name', 'doubao-seed-1-6-250615')
            )
            
            # 参数配置
            params_data = llm_data.get('parameters', {})
            thinking_data = params_data.get('thinking', {})
            thinking_config = ThinkingConfig(
                type=thinking_data.get('type', 'disabled')
            )
            
            parameters_config = ParametersConfig(
                temperature=params_data.get('temperature', 0.7),
                top_p=params_data.get('top_p', 0.95),
                max_tokens=params_data.get('max_tokens', 4096),
                stream=params_data.get('stream', True),
                thinking=thinking_config,
                frequency_penalty=params_data.get('frequency_penalty', 0.0),
                presence_penalty=params_data.get('presence_penalty', 0.0)
            )
            
            config.llm = LLMConfig(
                volcengine=volcengine_config,
                model=model_config,
                parameters=parameters_config,
                timeout=llm_data.get('timeout', 60),
                system_prompt=llm_data.get('system_prompt', '')
            )
        
        # 解析Function Calling配置
        if 'function_calling' in config_data:
            fc_data = config_data['function_calling']
            tools_data = fc_data.get('tools', {})
            tools_config = {}
            
            for tool_name, tool_data in tools_data.items():
                tools_config[tool_name] = ToolConfig(
                    enabled=tool_data.get('enabled', True),
                    description=tool_data.get('description', '')
                )
            
            config.function_calling = FunctionCallingConfig(
                enabled=fc_data.get('enabled', True),
                tools=tools_config
            )
        
        # 解析MCP配置
        if 'mcp' in config_data:
            mcp_data = config_data['mcp']
            servers_data = mcp_data.get('servers', {})
            servers_config = {}
            
            for server_name, server_data in servers_data.items():
                servers_config[server_name] = MCPServerConfig(
                    enabled=server_data.get('enabled', False),
                    command=server_data.get('command'),
                    args=server_data.get('args'),
                    env=server_data.get('env'),
                    host=server_data.get('host'),
                    port=server_data.get('port')
                )
            
            config.mcp = MCPConfig(
                enabled=mcp_data.get('enabled', True),
                servers=servers_config
            )
        
        # 解析云台配置
        if 'gimbal' in config_data:
            gimbal_data = config_data['gimbal']
            config.gimbal = GimbalConfig(
                enabled=gimbal_data.get('enabled', False),
                serial_port=gimbal_data.get('serial_port', '/dev/ttyS1'),
                baudrate=gimbal_data.get('baudrate', 115200),
                pan_pin=gimbal_data.get('pan_pin', 3),
                tilt_pin=gimbal_data.get('tilt_pin', 4),
                init_pan_angle=gimbal_data.get('init_pan_angle', 135.0),
                init_tilt_angle=gimbal_data.get('init_tilt_angle', 135.0),
                angle_range=gimbal_data.get('angle_range', [0, 270]),
                supported_pins=gimbal_data.get('supported_pins', [2, 3, 4, 6, 44, 45, 46, 7, 10])
            )
        
        # 解析机械臂控制配置
        if 'arm_control' in config_data:
            arm_data = config_data['arm_control']
            config.arm_control = ArmControlConfig(
                enabled=arm_data.get('enabled', False),
                serial_port=arm_data.get('serial_port', '/dev/ttyS1'),
                baudrate=arm_data.get('baudrate', 115200)
            )
        
        # 解析VLM配置
        if 'vlm' in config_data:
            vlm_data = config_data['vlm']
            config.vlm = VLMConfig(
                enabled=vlm_data.get('enabled', False),
                api_key=vlm_data.get('api_key', ''),
                api_url=vlm_data.get('api_url', 'https://api.siliconflow.cn/v1/chat/completions'),
                model_name=vlm_data.get('model_name', 'Qwen/Qwen2-VL-72B-Instruct'),
                timeout=vlm_data.get('timeout', 120),
                max_image_size=vlm_data.get('max_image_size', 10 * 1024 * 1024),
                supported_formats=vlm_data.get('supported_formats', ['png', 'jpg', 'jpeg', 'gif', 'webp'])
            )
        
        # 解析日志配置
        if 'logging' in config_data:
            logging_data = config_data['logging']
            config.logging = LoggingConfig(
                level=logging_data.get('level', 'INFO'),
                format=logging_data.get('format', '%(asctime)s - %(name)s - %(levelname)s - %(message)s')
            )
        
        # 解析对话配置
        if 'conversation' in config_data:
            conv_data = config_data['conversation']
            config.conversation = ConversationConfig(
                max_history=conv_data.get('max_history', 20),
                save_to_file=conv_data.get('save_to_file', False),
                history_file=conv_data.get('history_file', 'conversations/chat_history.json')
            )
        
        return config
    
    def _load_env_variables(self):
        """从环境变量加载敏感配置"""
        if self.config:
            # 从环境变量读取API密钥
            env_api_key = os.getenv("VOLCENGINE_LLM_API_KEY")
            if env_api_key:
                self.config.llm.volcengine.api_key = env_api_key
                logger.info("已从环境变量加载火山引擎API密钥")

            # 从环境变量读取高德地图API密钥
            amap_key = os.getenv("AMAP_API_KEY")
            if amap_key and 'amap_maps' in self.config.mcp.servers:
                if self.config.mcp.servers['amap_maps'].env is None:
                    self.config.mcp.servers['amap_maps'].env = {}
                self.config.mcp.servers['amap_maps'].env['AMAP_API_KEY'] = amap_key
                logger.info("已从环境变量加载高德地图API密钥")

            # 服务器配置覆盖
            server_host = os.getenv("LLM_SERVER_HOST")
            if server_host:
                self.config.server.host = server_host
                logger.info(f"服务器主机地址已从环境变量覆盖: {server_host}")

            server_port = os.getenv("LLM_SERVER_PORT")
            if server_port:
                try:
                    self.config.server.port = int(server_port)
                    logger.info(f"服务器端口已从环境变量覆盖: {server_port}")
                except ValueError:
                    logger.warning(f"无效的服务器端口环境变量: {server_port}")

            server_debug = os.getenv("LLM_SERVER_DEBUG")
            if server_debug:
                self.config.server.debug = server_debug.lower() in ('true', '1', 'yes', 'on')
                logger.info(f"调试模式已从环境变量覆盖: {self.config.server.debug}")

            # 日志级别覆盖
            log_level = os.getenv("LLM_LOG_LEVEL")
            if log_level:
                self.config.logging.level = log_level.upper()
                logger.info(f"日志级别已从环境变量覆盖: {log_level}")

            # 模型配置覆盖
            model_name = os.getenv("LLM_MODEL_NAME")
            if model_name:
                self.config.llm.model.name = model_name
                logger.info(f"模型名称已从环境变量覆盖: {model_name}")

            temperature = os.getenv("LLM_TEMPERATURE")
            if temperature:
                try:
                    self.config.llm.parameters.temperature = float(temperature)
                    logger.info(f"温度参数已从环境变量覆盖: {temperature}")
                except ValueError:
                    logger.warning(f"无效的温度参数环境变量: {temperature}")

            max_tokens = os.getenv("LLM_MAX_TOKENS")
            if max_tokens:
                try:
                    self.config.llm.parameters.max_tokens = int(max_tokens)
                    logger.info(f"最大token数已从环境变量覆盖: {max_tokens}")
                except ValueError:
                    logger.warning(f"无效的最大token数环境变量: {max_tokens}")

            # 对话配置覆盖
            max_history = os.getenv("LLM_MAX_HISTORY")
            if max_history:
                try:
                    self.config.conversation.max_history = int(max_history)
                    logger.info(f"最大历史记录数已从环境变量覆盖: {max_history}")
                except ValueError:
                    logger.warning(f"无效的最大历史记录数环境变量: {max_history}")

            save_history = os.getenv("LLM_SAVE_HISTORY")
            if save_history:
                self.config.conversation.save_to_file = save_history.lower() in ('true', '1', 'yes', 'on')
                logger.info(f"保存历史记录设置已从环境变量覆盖: {self.config.conversation.save_to_file}")
            
            # VLM配置覆盖
            vlm_api_key = os.getenv("SILICONFLOW_API_KEY")
            if vlm_api_key:
                self.config.vlm.api_key = vlm_api_key
                logger.info("已从环境变量加载SiliconFlow API密钥")
            
            vlm_api_url = os.getenv("SILICONFLOW_API_URL")
            if vlm_api_url:
                self.config.vlm.api_url = vlm_api_url
                logger.info(f"VLM API URL已从环境变量覆盖: {vlm_api_url}")
            
            vlm_model_name = os.getenv("VLM_MODEL_NAME")
            if vlm_model_name:
                self.config.vlm.model_name = vlm_model_name
                logger.info(f"VLM模型名称已从环境变量覆盖: {vlm_model_name}")
    
    def _validate_config(self):
        """验证配置参数"""
        if not self.config:
            return
        
        # 验证服务器配置
        if not (1 <= self.config.server.port <= 65535):
            raise ValueError(f"无效的端口号: {self.config.server.port}")
        
        # 验证LLM配置
        if not self.config.llm.volcengine.api_key:
            logger.warning("未设置火山引擎API密钥，请设置环境变量 VOLCENGINE_LLM_API_KEY")
        
        if self.config.llm.timeout <= 0:
            raise ValueError(f"无效的超时时间: {self.config.llm.timeout}")
        
        logger.info("配置验证通过")
    
    def _save_default_config(self, config_path: Path):
        """保存默认配置文件"""
        try:
            config_path.parent.mkdir(parents=True, exist_ok=True)
            
            default_config = {
                'server': {
                    'host': '0.0.0.0',
                    'port': 8002,
                    'debug': False
                },
                'llm': {
                    'volcengine': {
                        'api_key': '',
                        'api_url': 'https://ark.cn-beijing.volces.com'
                    },
                    'model': {
                        'name': 'doubao-seed-1-6-250615'
                    },
                    'parameters': {
                        'temperature': 0.7,
                        'top_p': 0.95,
                        'max_tokens': 4096,
                        'stream': True,
                        'thinking': {
                            'type': 'disabled'
                        }
                    },
                    'timeout': 60,
                    'system_prompt': '你是一个智能AI助手，名叫小U。'
                },
                'function_calling': {
                    'enabled': True,
                    'tools': {
                        'calculator': {
                            'enabled': True,
                            'description': '基本数学计算工具'
                        }
                    }
                },
                'mcp': {
                    'enabled': True,
                    'servers': {}
                },
                'logging': {
                    'level': 'INFO',
                    'format': '%(asctime)s - %(name)s - %(levelname)s - %(message)s'
                },
                'conversation': {
                    'max_history': 20,
                    'save_to_file': False,
                    'history_file': 'conversations/chat_history.json'
                }
            }
            
            with open(config_path, 'w', encoding='utf-8') as f:
                yaml.dump(default_config, f, default_flow_style=False, allow_unicode=True)
            
            logger.info(f"默认配置文件已保存: {config_path}")
            
        except Exception as e:
            logger.error(f"保存默认配置文件失败: {e}")
    
    def get_config(self) -> AppConfig:
        """获取配置"""
        if self.config is None:
            self.load_config()
        return self.config


# Module-level singleton: the shared configuration manager instance
config_manager = ConfigManager()


def get_config() -> AppConfig:
    """Return the application configuration from the shared manager instance."""
    cfg = config_manager.get_config()
    return cfg
