"""
企业级金融数据MCP服务系统 - 核心配置管理
提供统一的配置管理、环境变量处理和系统参数配置
"""

import logging
import os
from dataclasses import dataclass, field
from pathlib import Path
from typing import Optional, Dict, Any
from urllib.parse import quote


@dataclass
class DatabaseConfig:
    """PostgreSQL database configuration: connection, pooling, SSL, engine."""
    host: str = "localhost"
    port: int = 5432
    database: str = "financial_data"
    username: str = "postgres"
    password: str = ""
    pool_size: int = 20
    max_overflow: int = 30
    pool_timeout: int = 30    # seconds to wait for a pooled connection
    pool_recycle: int = 3600  # seconds before a pooled connection is recycled
    echo: bool = False        # engine statement echoing
    
    # Connection-pool behaviour
    pool_pre_ping: bool = True
    pool_reset_on_return: str = "commit"
    
    # Connection timeouts (seconds)
    connect_timeout: int = 10
    command_timeout: int = 60
    
    # SSL settings
    ssl_mode: str = "prefer"
    ssl_cert: Optional[str] = None
    ssl_key: Optional[str] = None
    ssl_ca: Optional[str] = None
    
    # Engine / session semantics
    isolation_level: str = "READ_COMMITTED"
    autocommit: bool = False
    autoflush: bool = True
    expire_on_commit: bool = True
    
    @property
    def url(self) -> str:
        """Build the synchronous PostgreSQL connection URL.
        
        Username and password are percent-encoded so credentials containing
        URL-reserved characters ('@', ':', '/', ...) cannot corrupt the URL.
        """
        user = quote(self.username, safe="")
        pwd = quote(self.password, safe="")
        return f"postgresql://{user}:{pwd}@{self.host}:{self.port}/{self.database}"
    
    @property
    def async_url(self) -> str:
        """Build the asyncpg-based async connection URL (same encoding as `url`)."""
        user = quote(self.username, safe="")
        pwd = quote(self.password, safe="")
        return f"postgresql+asyncpg://{user}:{pwd}@{self.host}:{self.port}/{self.database}"


@dataclass
class RedisConfig:
    """Redis cache connection configuration."""
    enabled: bool = True
    host: str = "localhost"
    port: int = 6379
    database: int = 0
    password: Optional[str] = None
    max_connections: int = 50
    socket_timeout: int = 5          # seconds
    socket_connect_timeout: int = 5  # seconds
    retry_on_timeout: bool = True
    
    @property
    def url(self) -> str:
        """Build the Redis connection URL.
        
        The password is percent-encoded so reserved characters
        ('@', ':', '/', ...) cannot corrupt the URL.
        """
        auth = f":{quote(self.password, safe='')}@" if self.password else ""
        return f"redis://{auth}{self.host}:{self.port}/{self.database}"


@dataclass
class CacheConfig:
    """Cache strategy configuration for the two-tier (L1/L2) cache."""
    # L1 (in-process memory) cache settings
    l1_max_size: int = 1000
    l1_ttl: int = 300  # seconds (5 minutes)
    
    # L2 (Redis) cache settings
    l2_ttl: int = 3600  # seconds (1 hour)
    l2_max_memory: str = "512mb"
    
    # Cache warm-up settings
    warmup_enabled: bool = True
    warmup_batch_size: int = 100  # entries loaded per warm-up batch


@dataclass
class DataSyncConfig:
    """Data-synchronization scheduling and quality-check configuration."""
    # Real-time data sync interval (seconds)
    realtime_interval: int = 60
    
    # Daily-frequency data sync interval (seconds)
    daily_interval: int = 3600
    
    # Low-frequency (weekly) data sync interval (seconds)
    weekly_interval: int = 86400
    
    # Maximum number of concurrent sync tasks
    max_concurrent_tasks: int = 10
    
    # Retry policy for failed sync attempts
    max_retries: int = 3
    retry_delay: int = 5  # seconds between retries
    
    # Data quality checking
    quality_check_enabled: bool = True
    quality_threshold: float = 0.95  # minimum acceptable quality ratio


@dataclass
class MonitoringConfig:
    """Monitoring, logging and alerting configuration."""
    # Prometheus metrics endpoint
    prometheus_enabled: bool = True
    prometheus_port: int = 8000
    
    # Logging
    log_level: str = "INFO"
    log_format: str = "%(asctime)s - %(name)s - %(levelname)s - %(message)s"
    log_file: str = "logs/financial_mcp.log"
    log_max_size: int = 10 * 1024 * 1024  # 10MB per file before rotation
    log_backup_count: int = 5  # number of rotated files kept
    
    # Alerting
    alert_enabled: bool = True
    alert_webhook: Optional[str] = None  # webhook URL; None means no webhook configured
    
    # Performance monitoring
    performance_sampling_rate: float = 0.1  # fraction of requests sampled
    slow_query_threshold: float = 1.0  # seconds


@dataclass
class SecurityConfig:
    """API security configuration: auth, rate limiting, CORS, encryption."""
    # API key management
    api_key_required: bool = True
    api_key_header: str = "X-API-Key"
    
    # Rate limiting
    rate_limit_enabled: bool = True
    rate_limit_requests: int = 1000  # requests allowed per window
    rate_limit_window: int = 3600  # window length in seconds (1 hour)
    
    # CORS; default "*" allows all origins
    cors_enabled: bool = True
    cors_origins: list = field(default_factory=lambda: ["*"])
    
    # Data encryption (key material is unset by default)
    encryption_enabled: bool = False
    encryption_key: Optional[str] = None


@dataclass
class MCPConfig:
    """MCP service runtime configuration."""
    # Service identity (description is a user-facing string; kept as-is)
    service_name: str = "financial-data-mcp"
    service_version: str = "1.0.0"
    service_description: str = "企业级金融数据MCP服务"
    
    # Bind address
    port: int = 8080
    host: str = "0.0.0.0"
    
    # Worker processes
    workers: int = 4
    worker_class: str = "uvicorn.workers.UvicornWorker"
    
    # Request limits
    max_request_size: int = 10 * 1024 * 1024  # 10MB
    request_timeout: int = 30  # seconds
    
    # Tool execution limits
    max_tools: int = 100
    tool_timeout: int = 60  # seconds per tool invocation


class SystemConfig:
    """系统配置管理器"""
    
    def __init__(self, env_file: Optional[str] = None):
        """初始化配置管理器"""
        self.env_file = env_file or ".env"
        self._load_env_file()
        
        # 初始化各模块配置
        self.database = self._load_database_config()
        self.redis = self._load_redis_config()
        self.cache = self._load_cache_config()
        self.data_sync = self._load_data_sync_config()
        self.monitoring = self._load_monitoring_config()
        self.security = self._load_security_config()
        self.mcp = self._load_mcp_config()
        
        # 创建必要的目录
        self._create_directories()
        
        # 配置日志
        self._setup_logging()
    
    def _load_env_file(self):
        """加载环境变量文件"""
        if os.path.exists(self.env_file):
            with open(self.env_file, 'r', encoding='utf-8') as f:
                for line in f:
                    line = line.strip()
                    if line and not line.startswith('#') and '=' in line:
                        key, value = line.split('=', 1)
                        os.environ[key.strip()] = value.strip()
    
    def _load_database_config(self) -> DatabaseConfig:
        """加载数据库配置"""
        return DatabaseConfig(
            host=os.getenv("DB_HOST", "localhost"),
            port=int(os.getenv("DB_PORT", "5432")),
            database=os.getenv("DB_NAME", "financial_data"),
            username=os.getenv("DB_USER", "postgres"),
            password=os.getenv("DB_PASSWORD", ""),
            pool_size=int(os.getenv("DB_POOL_SIZE", "20")),
            max_overflow=int(os.getenv("DB_MAX_OVERFLOW", "30")),
            pool_timeout=int(os.getenv("DB_POOL_TIMEOUT", "30")),
            pool_recycle=int(os.getenv("DB_POOL_RECYCLE", "3600")),
            echo=os.getenv("DB_ECHO", "false").lower() == "true",
            pool_pre_ping=os.getenv("DB_POOL_PRE_PING", "true").lower() == "true",
            pool_reset_on_return=os.getenv("DB_POOL_RESET_ON_RETURN", "commit"),
            connect_timeout=int(os.getenv("DB_CONNECT_TIMEOUT", "10")),
            command_timeout=int(os.getenv("DB_COMMAND_TIMEOUT", "60")),
            ssl_mode=os.getenv("DB_SSL_MODE", "prefer"),
            ssl_cert=os.getenv("DB_SSL_CERT"),
            ssl_key=os.getenv("DB_SSL_KEY"),
            ssl_ca=os.getenv("DB_SSL_CA"),
            isolation_level=os.getenv("DB_ISOLATION_LEVEL", "READ_COMMITTED"),
            autocommit=os.getenv("DB_AUTOCOMMIT", "false").lower() == "true",
            autoflush=os.getenv("DB_AUTOFLUSH", "true").lower() == "true",
            expire_on_commit=os.getenv("DB_EXPIRE_ON_COMMIT", "true").lower() == "true"
        )
    
    def _load_redis_config(self) -> RedisConfig:
        """加载Redis配置"""
        return RedisConfig(
            host=os.getenv("REDIS_HOST", "localhost"),
            port=int(os.getenv("REDIS_PORT", "6379")),
            database=int(os.getenv("REDIS_DB", "0")),
            password=os.getenv("REDIS_PASSWORD"),
            max_connections=int(os.getenv("REDIS_MAX_CONNECTIONS", "50")),
            socket_timeout=int(os.getenv("REDIS_SOCKET_TIMEOUT", "5")),
            socket_connect_timeout=int(os.getenv("REDIS_CONNECT_TIMEOUT", "5"))
        )
    
    def _load_cache_config(self) -> CacheConfig:
        """加载缓存配置"""
        return CacheConfig(
            l1_max_size=int(os.getenv("CACHE_L1_MAX_SIZE", "1000")),
            l1_ttl=int(os.getenv("CACHE_L1_TTL", "300")),
            l2_ttl=int(os.getenv("CACHE_L2_TTL", "3600")),
            l2_max_memory=os.getenv("CACHE_L2_MAX_MEMORY", "512mb"),
            warmup_enabled=os.getenv("CACHE_WARMUP_ENABLED", "true").lower() == "true",
            warmup_batch_size=int(os.getenv("CACHE_WARMUP_BATCH_SIZE", "100"))
        )
    
    def _load_data_sync_config(self) -> DataSyncConfig:
        """加载数据同步配置"""
        return DataSyncConfig(
            realtime_interval=int(os.getenv("SYNC_REALTIME_INTERVAL", "60")),
            daily_interval=int(os.getenv("SYNC_DAILY_INTERVAL", "3600")),
            weekly_interval=int(os.getenv("SYNC_WEEKLY_INTERVAL", "86400")),
            max_concurrent_tasks=int(os.getenv("SYNC_MAX_CONCURRENT", "10")),
            max_retries=int(os.getenv("SYNC_MAX_RETRIES", "3")),
            retry_delay=int(os.getenv("SYNC_RETRY_DELAY", "5")),
            quality_check_enabled=os.getenv("SYNC_QUALITY_CHECK", "true").lower() == "true",
            quality_threshold=float(os.getenv("SYNC_QUALITY_THRESHOLD", "0.95"))
        )
    
    def _load_monitoring_config(self) -> MonitoringConfig:
        """加载监控配置"""
        return MonitoringConfig(
            prometheus_enabled=os.getenv("MONITORING_PROMETHEUS", "true").lower() == "true",
            prometheus_port=int(os.getenv("MONITORING_PROMETHEUS_PORT", "8000")),
            log_level=os.getenv("LOG_LEVEL", "INFO"),
            log_format=os.getenv("LOG_FORMAT", "%(asctime)s - %(name)s - %(levelname)s - %(message)s"),
            log_file=os.getenv("LOG_FILE", "logs/financial_mcp.log"),
            log_max_size=int(os.getenv("LOG_MAX_SIZE", str(10 * 1024 * 1024))),
            log_backup_count=int(os.getenv("LOG_BACKUP_COUNT", "5")),
            alert_enabled=os.getenv("ALERT_ENABLED", "true").lower() == "true",
            alert_webhook=os.getenv("ALERT_WEBHOOK"),
            performance_sampling_rate=float(os.getenv("PERF_SAMPLING_RATE", "0.1")),
            slow_query_threshold=float(os.getenv("SLOW_QUERY_THRESHOLD", "1.0"))
        )
    
    def _load_security_config(self) -> SecurityConfig:
        """加载安全配置"""
        cors_origins = os.getenv("CORS_ORIGINS", "*").split(",")
        return SecurityConfig(
            api_key_required=os.getenv("API_KEY_REQUIRED", "true").lower() == "true",
            api_key_header=os.getenv("API_KEY_HEADER", "X-API-Key"),
            rate_limit_enabled=os.getenv("RATE_LIMIT_ENABLED", "true").lower() == "true",
            rate_limit_requests=int(os.getenv("RATE_LIMIT_REQUESTS", "1000")),
            rate_limit_window=int(os.getenv("RATE_LIMIT_WINDOW", "3600")),
            cors_enabled=os.getenv("CORS_ENABLED", "true").lower() == "true",
            cors_origins=cors_origins,
            encryption_enabled=os.getenv("ENCRYPTION_ENABLED", "false").lower() == "true",
            encryption_key=os.getenv("ENCRYPTION_KEY")
        )
    
    def _load_mcp_config(self) -> MCPConfig:
        """加载MCP配置"""
        return MCPConfig(
            service_name=os.getenv("MCP_SERVICE_NAME", "financial-data-mcp"),
            service_version=os.getenv("MCP_SERVICE_VERSION", "1.0.0"),
            service_description=os.getenv("MCP_SERVICE_DESC", "企业级金融数据MCP服务"),
            port=int(os.getenv("MCP_PORT", "8080")),
            host=os.getenv("MCP_HOST", "0.0.0.0"),
            workers=int(os.getenv("MCP_WORKERS", "4")),
            worker_class=os.getenv("MCP_WORKER_CLASS", "uvicorn.workers.UvicornWorker"),
            max_request_size=int(os.getenv("MCP_MAX_REQUEST_SIZE", str(10 * 1024 * 1024))),
            request_timeout=int(os.getenv("MCP_REQUEST_TIMEOUT", "30")),
            max_tools=int(os.getenv("MCP_MAX_TOOLS", "100")),
            tool_timeout=int(os.getenv("MCP_TOOL_TIMEOUT", "60"))
        )
    
    def _create_directories(self):
        """创建必要的目录"""
        directories = [
            "logs",
            "data",
            "cache",
            "backups",
            "temp"
        ]
        
        for directory in directories:
            Path(directory).mkdir(parents=True, exist_ok=True)
    
    def _setup_logging(self):
        """配置日志系统"""
        from logging.handlers import RotatingFileHandler
        
        # 确保日志目录存在
        log_dir = Path(self.monitoring.log_file).parent
        log_dir.mkdir(parents=True, exist_ok=True)
        
        # 配置根日志器
        logging.basicConfig(
            level=getattr(logging, self.monitoring.log_level),
            format=self.monitoring.log_format,
            handlers=[
                # 控制台处理器
                logging.StreamHandler(),
                # 文件处理器（带轮转）
                RotatingFileHandler(
                    self.monitoring.log_file,
                    maxBytes=self.monitoring.log_max_size,
                    backupCount=self.monitoring.log_backup_count,
                    encoding='utf-8'
                )
            ]
        )
    
    def get_config_dict(self) -> Dict[str, Any]:
        """获取配置字典"""
        return {
            "database": self.database.__dict__,
            "redis": self.redis.__dict__,
            "cache": self.cache.__dict__,
            "data_sync": self.data_sync.__dict__,
            "monitoring": self.monitoring.__dict__,
            "security": self.security.__dict__,
            "mcp": self.mcp.__dict__
        }
    
    def validate_config(self) -> bool:
        """验证配置有效性"""
        try:
            # 验证数据库配置
            if not all([self.database.host, self.database.database, 
                       self.database.username]):
                raise ValueError("数据库配置不完整")
            
            # 验证Redis配置
            if not all([self.redis.host, str(self.redis.port)]):
                raise ValueError("Redis配置不完整")
            
            # 验证端口配置
            if not (1 <= self.mcp.port <= 65535):
                raise ValueError("MCP服务端口配置无效")
            
            if not (1 <= self.monitoring.prometheus_port <= 65535):
                raise ValueError("Prometheus端口配置无效")
            
            return True
            
        except Exception as e:
            logging.error(f"配置验证失败: {e}")
            return False


# Global configuration instance.
# NOTE: instantiating SystemConfig at import time has side effects — it reads
# the .env file, creates working directories (logs/, data/, cache/, backups/,
# temp/) in the current working directory, and configures the root logger.
config = SystemConfig()


def get_settings() -> SystemConfig:
    """Return the shared global configuration instance.

    This accessor follows the settings-provider pattern used by FastAPI
    and other modern Python frameworks, so callers can depend on a
    function instead of importing the module-level singleton directly.

    Returns:
        SystemConfig: the module-level configuration singleton.
    """
    return config