"""
数据库查询优化配置
包含连接池、缓存、查询优化等功能
"""

import asyncio
import hashlib
import json
import logging
import re
import time
from functools import wraps
from typing import Dict, Any, Optional, List, Callable

import redis
from sqlalchemy import event, text
from sqlalchemy.engine import Engine
from sqlalchemy.orm import Session
from sqlalchemy.pool import QueuePool

logger = logging.getLogger(__name__)


class DatabaseOptimizer:
    """Database optimizer.

    Instruments a SQLAlchemy engine with query-timing event hooks,
    records slow queries (optionally to Redis), and keeps per-statement
    aggregate statistics.
    """

    # Pre-compiled patterns used by _normalize_query, hoisted out of the
    # per-query hot path.
    _PARAM_PATTERN = re.compile(r'\$\d+|\?|:\w+')
    _WHITESPACE_PATTERN = re.compile(r'\s+')

    def __init__(self, redis_client: Optional["redis.Redis"] = None):
        # Optional Redis client used to persist slow-query records.
        self.redis_client = redis_client
        self.query_cache: Dict[str, Any] = {}
        # Maps normalized statement -> {"count", "total_time", "avg_time",
        # "max_time", "min_time"}.
        self.query_stats: Dict[str, Dict[str, Any]] = {}
        self.slow_query_threshold = 1.0  # slow-query threshold (seconds)

    def setup_connection_pool(self, engine: "Engine"):
        """Attach pool tuning and query-timing listeners to *engine*.

        NOTE(review): mutating private pool attributes (_recycle,
        _pre_ping) after engine creation is fragile; prefer passing
        pool_recycle/pool_pre_ping to create_engine. Kept here for
        backward compatibility with existing callers.
        """
        engine.pool._recycle = 3600   # recycle connections after 1 hour
        engine.pool._pre_ping = True  # validate connections before use

        @event.listens_for(engine, "connect")
        def set_sqlite_pragma(dbapi_connection, connection_record):
            """Apply SQLite tuning PRAGMAs to each new connection."""
            if 'sqlite' in str(engine.url):
                cursor = dbapi_connection.cursor()
                try:
                    cursor.execute("PRAGMA journal_mode=WAL")    # enable WAL mode
                    cursor.execute("PRAGMA synchronous=NORMAL")  # relax fsync frequency
                    cursor.execute("PRAGMA cache_size=10000")    # larger page cache
                    cursor.execute("PRAGMA foreign_keys=ON")     # enforce FK constraints
                finally:
                    # Always release the cursor, even if a PRAGMA fails.
                    cursor.close()

        @event.listens_for(engine, "before_cursor_execute")
        def receive_before_cursor_execute(conn, cursor, statement, parameters, context, executemany):
            """Stamp the query start time on the execution context."""
            # perf_counter is monotonic, so measured durations are immune
            # to wall-clock adjustments (NTP, DST).
            context._query_start_time = time.perf_counter()

        @event.listens_for(engine, "after_cursor_execute")
        def receive_after_cursor_execute(conn, cursor, statement, parameters, context, executemany):
            """Measure the query duration and update statistics."""
            # Guard against contexts that never saw the before-hook
            # (previously raised AttributeError).
            start = getattr(context, "_query_start_time", None)
            if start is None:
                return
            total_time = time.perf_counter() - start

            # Record slow queries.
            if total_time > self.slow_query_threshold:
                logger.warning(f"慢查询检测: {total_time:.3f}s - {statement[:100]}...")
                self._record_slow_query(statement, parameters, total_time)

            # Update per-statement statistics.
            self._update_query_stats(statement, total_time)

    def _record_slow_query(self, statement: str, parameters: Any, execution_time: float):
        """Persist one slow-query record to Redis (best effort)."""
        slow_query = {
            "statement": statement,
            "parameters": str(parameters),
            "execution_time": execution_time,
            "timestamp": time.time()
        }

        if self.redis_client:
            try:
                # Nanosecond timestamp avoids key collisions when several
                # slow queries land within the same second (the old
                # int(time.time()) key silently overwrote records).
                key = f"slow_query:{time.time_ns()}"
                self.redis_client.setex(key, 86400, json.dumps(slow_query))  # keep for 24 hours
            except Exception as e:
                logger.error(f"保存慢查询记录失败: {e}")

    def _update_query_stats(self, statement: str, execution_time: float):
        """Fold one execution time into the aggregate stats for *statement*."""
        # Normalized SQL serves as the stats key.
        query_key = self._normalize_query(statement)

        if query_key not in self.query_stats:
            self.query_stats[query_key] = {
                "count": 0,
                "total_time": 0,
                "avg_time": 0,
                "max_time": 0,
                "min_time": float('inf')
            }

        stats = self.query_stats[query_key]
        stats["count"] += 1
        stats["total_time"] += execution_time
        stats["avg_time"] = stats["total_time"] / stats["count"]
        stats["max_time"] = max(stats["max_time"], execution_time)
        stats["min_time"] = min(stats["min_time"], execution_time)

    def _normalize_query(self, statement: str) -> str:
        """Collapse a SQL statement into a stable stats key.

        Replaces bind-parameter placeholders ($1, ?, :name) with '?' and
        squeezes whitespace so textually-equivalent queries share a key.
        """
        normalized = self._PARAM_PATTERN.sub('?', statement)
        normalized = self._WHITESPACE_PATTERN.sub(' ', normalized).strip()
        return normalized[:100]  # cap key length

    def get_query_stats(self) -> Dict[str, Any]:
        """Return aggregate statistics across all recorded queries."""
        # Compute the totals once instead of re-summing per field.
        total_queries = sum(stats["count"] for stats in self.query_stats.values())
        total_time = sum(stats["total_time"] for stats in self.query_stats.values())
        return {
            "total_queries": total_queries,
            "avg_query_time": total_time / max(total_queries, 1),
            "slow_queries": len([stats for stats in self.query_stats.values() if stats["max_time"] > self.slow_query_threshold]),
            "query_details": self.query_stats
        }


class QueryCache:
    """Two-tier query result cache: Redis (shared) plus an in-process dict.

    NOTE(review): Redis hits return JSON-decoded data while local hits
    return the original Python objects, so callers should not rely on the
    exact result type — confirm against call sites.
    """

    def __init__(self, redis_client: Optional["redis.Redis"] = None, default_ttl: int = 300):
        self.redis_client = redis_client
        # cache_key -> {"data": result, "expires_at": epoch seconds}
        self.local_cache: Dict[str, Dict[str, Any]] = {}
        self.default_ttl = default_ttl  # seconds
        self.cache_stats = {"hits": 0, "misses": 0}

    def _generate_cache_key(self, query: str, params: Any) -> str:
        """Derive a stable cache key from the query text and its parameters."""
        cache_data = f"{query}:{str(params)}"
        # md5 is used only for key derivation, not for security.
        return f"query_cache:{hashlib.md5(cache_data.encode()).hexdigest()}"

    def get(self, query: str, params: Any) -> Optional[Any]:
        """Return the cached result, or None on a miss (Redis first, then local)."""
        cache_key = self._generate_cache_key(query, params)

        # Try the shared Redis tier first.
        if self.redis_client:
            try:
                cached_data = self.redis_client.get(cache_key)
                if cached_data:
                    self.cache_stats["hits"] += 1
                    return json.loads(cached_data)
            except Exception as e:
                logger.error(f"Redis缓存读取失败: {e}")

        # Fall back to the local tier; evict lazily when expired.
        if cache_key in self.local_cache:
            cache_entry = self.local_cache[cache_key]
            if time.time() < cache_entry["expires_at"]:
                self.cache_stats["hits"] += 1
                return cache_entry["data"]
            else:
                del self.local_cache[cache_key]

        self.cache_stats["misses"] += 1
        return None

    def set(self, query: str, params: Any, result: Any, ttl: Optional[int] = None):
        """Cache *result* in both tiers for *ttl* seconds (default: default_ttl)."""
        cache_key = self._generate_cache_key(query, params)
        ttl = ttl or self.default_ttl

        # Skip anything JSON cannot represent; default=str deliberately
        # stringifies unknown leaf types.
        try:
            serialized_result = json.dumps(result, default=str)
        except (TypeError, ValueError):
            logger.warning(f"无法序列化查询结果: {type(result)}")
            return

        # Write-through to Redis (best effort).
        if self.redis_client:
            try:
                self.redis_client.setex(cache_key, ttl, serialized_result)
            except Exception as e:
                logger.error(f"Redis缓存写入失败: {e}")

        # Keep the raw Python object locally.
        self.local_cache[cache_key] = {
            "data": result,
            "expires_at": time.time() + ttl
        }

        # Bound local cache size: evict the entry expiring soonest.
        if len(self.local_cache) > 1000:
            oldest_key = min(self.local_cache,
                             key=lambda k: self.local_cache[k]["expires_at"])
            del self.local_cache[oldest_key]

    def invalidate(self, pattern: Optional[str] = None):
        """Remove cached entries; all of them when *pattern* is None.

        NOTE(review): local keys are md5 digests, so substring matching a
        human-readable *pattern* against them will rarely hit — verify
        against callers before relying on pattern-based invalidation.
        """
        if pattern:
            keys_to_delete = [key for key in self.local_cache if pattern in key]
            for key in keys_to_delete:
                del self.local_cache[key]

            if self.redis_client:
                try:
                    # scan_iter avoids KEYS, which blocks Redis on large keyspaces.
                    keys = list(self.redis_client.scan_iter(f"query_cache:*{pattern}*"))
                    if keys:
                        self.redis_client.delete(*keys)
                except Exception as e:
                    logger.error(f"Redis缓存清除失败: {e}")
        else:
            # Clear everything in both tiers.
            self.local_cache.clear()
            if self.redis_client:
                try:
                    keys = list(self.redis_client.scan_iter("query_cache:*"))
                    if keys:
                        self.redis_client.delete(*keys)
                except Exception as e:
                    logger.error(f"Redis缓存清除失败: {e}")

    def get_stats(self) -> Dict[str, Any]:
        """Return hit/miss counters, the derived hit rate, and local size."""
        total_requests = self.cache_stats["hits"] + self.cache_stats["misses"]
        # max(..., 1) avoids division by zero before any request is seen.
        hit_rate = self.cache_stats["hits"] / max(total_requests, 1)

        return {
            "hits": self.cache_stats["hits"],
            "misses": self.cache_stats["misses"],
            "hit_rate": hit_rate,
            "local_cache_size": len(self.local_cache)
        }


def cached_query(ttl: int = 300, cache_key_func: Optional[Callable] = None):
    """Decorator that caches a query function's results.

    The backend is opt-in: assign a QueryCache-compatible object
    (``get(key, params)`` / ``set(key, params, result, ttl)``) to the
    decorated function's ``_query_cache`` attribute. Without one, the
    wrapped function is simply called every time.

    Args:
        ttl: cache lifetime in seconds, forwarded to the backend's set().
        cache_key_func: optional callable building the cache key from the
            call arguments; defaults to func-name + repr of args/kwargs.
    """
    def decorator(func):
        @wraps(func)
        def wrapper(*args, **kwargs):
            # Build the cache key.
            if cache_key_func:
                cache_key = cache_key_func(*args, **kwargs)
            else:
                cache_key = f"{func.__name__}:{str(args)}:{str(kwargs)}"

            # Bug fix: the original checked hasattr(func, '_query_cache'),
            # but callers only ever see the wrapper, so caching could never
            # be enabled after decoration. Look it up on the wrapper;
            # functools.wraps copies func.__dict__, so attaching the backend
            # before decoration still works too.
            cache = getattr(wrapper, '_query_cache', None)

            # Try the cache first.
            if cache is not None:
                cached_result = cache.get(cache_key, None)
                if cached_result is not None:
                    return cached_result

            # Execute the query.
            result = func(*args, **kwargs)

            # Store the result.
            if cache is not None:
                cache.set(cache_key, None, result, ttl)

            return result

        # Expose the backend slot without clobbering one copied by wraps.
        if not hasattr(wrapper, '_query_cache'):
            wrapper._query_cache = None
        return wrapper
    return decorator


class DatabaseIndexOptimizer:
    """Heuristic index advisor: suggests and creates composite indexes."""

    def __init__(self, db_session: "Session"):
        self.db_session = db_session

    def analyze_missing_indexes(self) -> List[Dict[str, Any]]:
        """Return index suggestions for known hot query patterns.

        The suggestions are currently hard-coded from observed access
        patterns; a future version could derive them from the slow-query
        log instead.
        """
        # Known frequently-filtered column combinations.
        common_queries = [
            {
                "table": "cameras",
                "columns": ["status", "location"],
                "reason": "频繁按状态和位置查询摄像头"
            },
            {
                "table": "alerts",
                "columns": ["status", "created_at"],
                "reason": "频繁按状态和时间查询告警"
            },
            {
                "table": "ai_tasks",
                "columns": ["camera_id", "status"],
                "reason": "频繁按摄像头和状态查询AI任务"
            }
        ]

        suggestions = []
        for query in common_queries:
            # Build the name once instead of repeating the join twice.
            index_name = f"idx_{query['table']}_{'_'.join(query['columns'])}"
            suggestions.append({
                "table": query["table"],
                "columns": query["columns"],
                "index_name": index_name,
                # IF NOT EXISTS makes re-running idempotent (supported by
                # SQLite and PostgreSQL; NOTE(review): not by MySQL —
                # confirm the target dialects).
                "sql": f"CREATE INDEX IF NOT EXISTS {index_name} ON {query['table']} ({', '.join(query['columns'])})",
                "reason": query["reason"]
            })

        return suggestions

    def create_recommended_indexes(self):
        """Create every recommended index, committing each individually.

        Committing per index (and rolling back on failure) keeps the
        session usable after an error; the original single trailing
        commit could fail on a session left in an aborted transaction,
        losing all successfully created indexes.
        """
        for suggestion in self.analyze_missing_indexes():
            try:
                self.db_session.execute(text(suggestion["sql"]))
                self.db_session.commit()
                logger.info(f"创建索引: {suggestion['index_name']}")
            except Exception as e:
                self.db_session.rollback()
                logger.error(f"创建索引失败 {suggestion['index_name']}: {e}")


class ConnectionPoolMonitor:
    """Reports and sanity-checks the state of an engine's connection pool."""

    def __init__(self, engine: Engine):
        self.engine = engine
        # Counters reported alongside the live pool figures; not updated
        # by this class itself.
        self.stats = {
            "total_connections": 0,
            "active_connections": 0,
            "idle_connections": 0,
            "connection_errors": 0
        }

    def get_pool_status(self) -> Dict[str, Any]:
        """Snapshot the current pool counters into a plain dict."""
        pool = self.engine.pool
        snapshot: Dict[str, Any] = {
            "pool_size": pool.size(),
            "checked_in": pool.checkedin(),
            "checked_out": pool.checkedout(),
            "overflow": pool.overflow(),
            "invalid": pool.invalid(),
        }
        snapshot["stats"] = self.stats
        return snapshot

    def monitor_connections(self):
        """Warn on unhealthy pool conditions and return the status snapshot."""
        pool_status = self.get_pool_status()

        # More than 80% of the pool checked out counts as high utilization.
        high_water = pool_status["pool_size"] * 0.8
        if pool_status["checked_out"] > high_water:
            logger.warning("连接池使用率过高")

        if pool_status["invalid"] > 0:
            logger.warning(f"发现 {pool_status['invalid']} 个无效连接")

        return pool_status


# Module-level singleton instances; setup_database_optimization() injects
# the shared Redis client and engine hooks into these.
db_optimizer = DatabaseOptimizer()
query_cache = QueryCache()


def setup_database_optimization(engine: Engine, redis_client: Optional[redis.Redis] = None):
    """Wire the module-level optimizer and cache to *engine*.

    Args:
        engine: SQLAlchemy engine to instrument with pool/query hooks.
        redis_client: optional Redis backend shared by the optimizer
            (slow-query log) and the query cache.
    """
    # No `global` declaration needed: the singletons are never rebound,
    # only their attributes are mutated.
    if redis_client:
        db_optimizer.redis_client = redis_client
        query_cache.redis_client = redis_client

    # Install pool tuning and query-timing listeners.
    db_optimizer.setup_connection_pool(engine)

    logger.info("数据库优化配置已启用")


def get_database_performance_stats() -> Dict[str, Any]:
    """Collect a combined snapshot of query and cache performance."""
    snapshot: Dict[str, Any] = {}
    snapshot["query_stats"] = db_optimizer.get_query_stats()
    snapshot["cache_stats"] = query_cache.get_stats()
    snapshot["timestamp"] = time.time()
    return snapshot