"""
数据库查询优化器
提供高性能的数据库查询优化策略
"""
import logging
from datetime import datetime, timedelta
from typing import Any, Dict, List, Optional, Tuple, Union

from sqlalchemy import text, func, and_, or_, desc, asc
from sqlalchemy.orm import Session
from sqlalchemy.sql import select

from models.petition_record import PetitionRecord
from models.petition_record import RegionalAnalysisResult, ComplaintTypeAnalysisResult
from models.petition_record import SentimentAnalysisResult, ComplianceAnalysisResult

# Module-level logger named after this module (standard logging convention).
logger = logging.getLogger(__name__)


class QueryOptimizer:
    """Builds optimized analytics SQL for petition records.

    Each query-builder method returns a ``(sql, params)`` pair intended to
    be executed via :meth:`execute_optimized_query`, which additionally
    records per-query timing statistics and logs slow queries.

    Note: the time-series query uses SQLite's ``strftime``; other backends
    would need a dialect-specific equivalent.
    """

    def __init__(self):
        # Per-query timing statistics, keyed by hash(sql); populated by
        # _log_query_performance on each execution.
        self.query_stats: Dict[int, Dict[str, Any]] = {}
        # Queries taking longer than this many seconds are logged as slow.
        self.slow_query_threshold = 2.0

    def optimize_petition_query(self, db: "Session",
                                filters: Optional[Dict] = None) -> Tuple[str, Dict]:
        """Build the joined petition-record listing query.

        Args:
            db: Database session. Currently unused; kept for call-site
                compatibility.
            filters: Optional filter dict. Recognized keys: ``start_date``,
                ``end_date``, ``region``, ``district``, ``task_id``.

        Returns:
            Tuple of (SQL string, bound-parameter dict).
        """
        # Base query: petition records LEFT JOINed with the four analysis
        # result tables, so records without analysis rows still appear.
        base_sql = """
        SELECT
            pr.id,
            pr.xh,
            pr.flnr,
            pr.ts_ds,
            pr.ts_qxs,
            pr.ts_jd,
            pr.djsj,
            pr.created_at,
            rar.dzxx_lng,
            rar.dzxx_lat,
            ctar.tslx_yj,
            ctar.tslx_rj,
            ctar.tslx_sj,
            sar.qgfx_lx,
            car.gffx_zt
        FROM petition_record pr
        LEFT JOIN regional_analysis_result rar ON pr.xh = rar.petition_record_id
        LEFT JOIN complaint_type_analysis_result ctar ON pr.xh = ctar.petition_record_id
        LEFT JOIN sentiment_analysis_result sar ON pr.xh = sar.petition_record_id
        LEFT JOIN compliance_analysis_result car ON pr.xh = car.petition_record_id
        WHERE pr.is_deleted = 0
        """

        # Build optional filters as bound parameters (never interpolated
        # into the SQL text) to prevent SQL injection.
        conditions = []
        params = {}

        if filters:
            if filters.get('start_date'):
                conditions.append("pr.djsj >= :start_date")
                params['start_date'] = filters['start_date']

            if filters.get('end_date'):
                conditions.append("pr.djsj <= :end_date")
                params['end_date'] = filters['end_date']

            if filters.get('region'):
                conditions.append("pr.ts_ds = :region")
                params['region'] = filters['region']

            if filters.get('district'):
                conditions.append("pr.ts_qxs = :district")
                params['district'] = filters['district']

            if filters.get('task_id'):
                conditions.append("pr.task_id = :task_id")
                params['task_id'] = filters['task_id']

        if conditions:
            base_sql += " AND " + " AND ".join(conditions)

        # Default ordering: most recently registered first.
        base_sql += " ORDER BY pr.djsj DESC"

        return base_sql, params

    def get_time_series_query(self, granularity: str = 'day',
                              days: int = 30) -> Tuple[str, Dict]:
        """Build a time-series statistics query (SQLite dialect).

        Args:
            granularity: Bucket size, one of ``day``/``week``/``month``/
                ``year``; unknown values fall back to ``day``.
            days: How many days back from now to include.

        Returns:
            Tuple of (SQL string, bound-parameter dict with ``start_date``).
        """
        end_date = datetime.now()
        start_date = end_date - timedelta(days=days)

        # Map granularity to a strftime pattern used for bucketing.
        time_formats = {
            'day': "%Y-%m-%d",
            'week': "%Y-%W",
            'month': "%Y-%m",
            'year': "%Y"
        }

        time_format = time_formats.get(granularity, "%Y-%m-%d")

        # LAG window function provides the previous bucket's count so the
        # growth rate is computed in a single pass. Only the vetted
        # time_format string is interpolated; dates remain bound params.
        query_sql = f"""
        WITH time_stats AS (
            SELECT
                strftime('{time_format}', djsj) as period,
                COUNT(*) as count,
                COUNT(DISTINCT ts_ds) as region_count,
                LAG(COUNT(*)) OVER (ORDER BY strftime('{time_format}', djsj)) as prev_count
            FROM petition_record
            WHERE djsj >= :start_date
            AND is_deleted = 0
            GROUP BY strftime('{time_format}', djsj)
        )
        SELECT
            period,
            count,
            region_count,
            prev_count,
            CASE
                WHEN prev_count > 0
                THEN ROUND(((count - prev_count) * 100.0 / prev_count), 2)
                ELSE 0
            END as growth_rate
        FROM time_stats
        ORDER BY period
        """

        return query_sql, {'start_date': start_date}

    def get_regional_query(self, level: str = 'district',
                           limit: int = 20) -> Tuple[str, Dict]:
        """Build a regional aggregation query.

        Args:
            level: Region granularity, one of ``city``/``district``/
                ``street``; unknown values fall back to ``district``.
            limit: Maximum number of rows returned.

        Returns:
            Tuple of (SQL string, bound-parameter dict with ``limit``).
        """
        # Whitelist mapping: only these three column names can ever be
        # interpolated below, so the f-string is injection-safe.
        field_map = {
            'city': 'ts_ds',
            'district': 'ts_qxs',
            'street': 'ts_jd'
        }

        region_field = field_map.get(level, 'ts_qxs')

        query_sql = f"""
        SELECT
            {region_field} as region_name,
            COUNT(*) as count,
            COUNT(DISTINCT flnr) as type_count,
            ROUND(COUNT(*) * 100.0 / SUM(COUNT(*)) OVER (), 2) as percentage,
            RANK() OVER (ORDER BY COUNT(*) DESC) as rank
        FROM petition_record
        WHERE {region_field} IS NOT NULL
        AND is_deleted = 0
        GROUP BY {region_field}
        ORDER BY count DESC
        LIMIT :limit
        """

        return query_sql, {'limit': limit}

    def get_complaint_type_query(self, limit: int = 20) -> Tuple[str, Dict]:
        """Build a complaint-type aggregation query.

        Args:
            limit: Maximum number of rows returned.

        Returns:
            Tuple of (SQL string, bound-parameter dict with ``limit``).
        """
        query_sql = """
        SELECT
            flnr as type_name,
            COUNT(*) as count,
            ROUND(COUNT(*) * 100.0 / SUM(COUNT(*)) OVER (), 2) as percentage,
            COUNT(DISTINCT ts_ds) as region_count,
            RANK() OVER (ORDER BY COUNT(*) DESC) as rank
        FROM petition_record
        WHERE flnr IS NOT NULL
        AND is_deleted = 0
        GROUP BY flnr
        ORDER BY count DESC
        LIMIT :limit
        """

        return query_sql, {'limit': limit}

    def get_sentiment_query(self, limit: int = 10) -> Tuple[str, Dict]:
        """Build a sentiment-analysis aggregation query.

        Args:
            limit: Maximum number of rows returned.

        Returns:
            Tuple of (SQL string, bound-parameter dict with ``limit``).
        """
        # INNER JOIN: only records that actually have a sentiment result.
        query_sql = """
        SELECT
            sar.qgfx_lx as sentiment_type,
            COUNT(pr.id) as count,
            ROUND(COUNT(pr.id) * 100.0 / SUM(COUNT(pr.id)) OVER (), 2) as percentage,
            COUNT(DISTINCT pr.ts_ds) as region_count
        FROM petition_record pr
        INNER JOIN sentiment_analysis_result sar ON pr.xh = sar.petition_record_id
        WHERE sar.qgfx_lx IS NOT NULL
        AND pr.is_deleted = 0
        GROUP BY sar.qgfx_lx
        ORDER BY count DESC
        LIMIT :limit
        """

        return query_sql, {'limit': limit}

    def get_compliance_query(self, limit: int = 20) -> Tuple[str, Dict]:
        """Build a compliance-status aggregation query.

        The result set is a UNION of an overall status summary and the
        top-``limit`` departments by compliance rate; in the department
        rows the ``compliance_status`` column is the literal
        ``'department'`` and ``percentage`` carries the compliance rate.

        Args:
            limit: Maximum number of department rows returned.

        Returns:
            Tuple of (SQL string, bound-parameter dict with ``limit``).
        """
        query_sql = """
        WITH compliance_stats AS (
            SELECT
                car.gffx_zt as compliance_status,
                COUNT(pr.id) as count,
                pr.ts_qxdw as department
            FROM petition_record pr
            INNER JOIN compliance_analysis_result car ON pr.xh = car.petition_record_id
            WHERE car.gffx_zt IS NOT NULL
            AND pr.is_deleted = 0
            GROUP BY car.gffx_zt, pr.ts_qxdw
        ),
        summary AS (
            SELECT
                compliance_status,
                SUM(count) as total_count,
                ROUND(SUM(count) * 100.0 / SUM(SUM(count)) OVER (), 2) as percentage
            FROM compliance_stats
            GROUP BY compliance_status
        ),
        department_stats AS (
            SELECT
                department,
                SUM(CASE WHEN compliance_status = '1' THEN count ELSE 0 END) as compliant_count,
                SUM(CASE WHEN compliance_status = '0' THEN count ELSE 0 END) as non_compliant_count,
                SUM(count) as total_count,
                ROUND(SUM(CASE WHEN compliance_status = '1' THEN count ELSE 0 END) * 100.0 / SUM(count), 2) as compliance_rate
            FROM compliance_stats
            WHERE department IS NOT NULL
            GROUP BY department
            ORDER BY compliance_rate DESC
            LIMIT :limit
        )
        SELECT * FROM summary
        UNION ALL
        SELECT
            'department' as compliance_status,
            total_count,
            compliance_rate as percentage
        FROM department_stats
        """

        return query_sql, {'limit': limit}

    def execute_optimized_query(self, db: "Session", query: str,
                                params: Optional[Dict] = None) -> List[Dict]:
        """Execute a query and record its performance statistics.

        Args:
            db: Database session.
            query: SQL string (with ``:name`` bound-parameter placeholders).
            params: Bound-parameter values, if any.

        Returns:
            List of result rows as plain dicts.

        Raises:
            Exception: Re-raises whatever the database layer raised, after
                logging the failure.
        """
        try:
            start_time = datetime.now()

            # text() keeps parameters bound rather than interpolated.
            result = db.execute(text(query), params or {})
            data = [dict(row._mapping) for row in result.fetchall()]

            # Record timing stats (and slow-query warnings) for this query.
            execution_time = (datetime.now() - start_time).total_seconds()
            self._log_query_performance(query, execution_time, len(data))

            return data

        except Exception as e:
            logger.error(f"查询执行失败: {str(e)}")
            raise

    def get_index_suggestions(self) -> List[str]:
        """Return DDL statements for indexes that support the queries above.

        Returns:
            List of ``CREATE INDEX IF NOT EXISTS`` statements.
        """
        suggestions = [
            "CREATE INDEX IF NOT EXISTS idx_petition_record_djsj ON petition_record(djsj)",
            "CREATE INDEX IF NOT EXISTS idx_petition_record_region ON petition_record(ts_ds, ts_qxs)",
            "CREATE INDEX IF NOT EXISTS idx_petition_record_type ON petition_record(flnr)",
            "CREATE INDEX IF NOT EXISTS idx_petition_record_task ON petition_record(task_id)",
            "CREATE INDEX IF NOT EXISTS idx_analysis_result_petition_id ON regional_analysis_result(petition_record_id)",
            "CREATE INDEX IF NOT EXISTS idx_analysis_result_task_id ON regional_analysis_result(task_id)",
        ]

        return suggestions

    def _log_query_performance(self, query: str, execution_time: float,
                               result_count: int) -> None:
        """Accumulate timing stats for one execution and flag slow queries.

        NOTE: keys are hash(query), which is stable within one process but
        not across processes (string hashing is randomized per run).
        """
        query_hash = hash(query)

        if query_hash not in self.query_stats:
            self.query_stats[query_hash] = {
                # Truncated preview only, to keep the stats dict small.
                'query': query[:100] + '...' if len(query) > 100 else query,
                'execution_count': 0,
                'total_time': 0,
                'avg_time': 0,
                'max_time': 0,
                'min_time': float('inf'),
                'result_counts': []
            }

        stats = self.query_stats[query_hash]
        stats['execution_count'] += 1
        stats['total_time'] += execution_time
        stats['avg_time'] = stats['total_time'] / stats['execution_count']
        stats['max_time'] = max(stats['max_time'], execution_time)
        stats['min_time'] = min(stats['min_time'], execution_time)
        stats['result_counts'].append(result_count)

        # Warn about individual executions that exceed the slow threshold.
        if execution_time > self.slow_query_threshold:
            logger.warning(
                f"慢查询检测: 执行时间 {execution_time:.2f}s, "
                f"返回记录数 {result_count}, "
                f"查询: {query[:200]}..."
            )

    def get_performance_stats(self) -> Dict[str, Any]:
        """Summarize accumulated query performance statistics.

        Returns:
            Aggregate stats dict (totals, averages, up to 5 slow queries,
            index suggestions), or a message dict when nothing was recorded.
        """
        if not self.query_stats:
            return {'message': '暂无查询统计信息'}

        total_queries = sum(stats['execution_count'] for stats in self.query_stats.values())
        total_time = sum(stats['total_time'] for stats in self.query_stats.values())
        avg_time = total_time / total_queries if total_queries > 0 else 0

        # A query counts as slow when its *average* time exceeds the threshold.
        slow_queries = [
            {
                'query': stats['query'],
                'avg_time': stats['avg_time'],
                'execution_count': stats['execution_count']
            }
            for stats in self.query_stats.values()
            if stats['avg_time'] > self.slow_query_threshold
        ]

        return {
            'total_queries': total_queries,
            'total_time': round(total_time, 2),
            'avg_time': round(avg_time, 4),
            'slow_query_count': len(slow_queries),
            'slow_queries': slow_queries[:5],  # cap at the first 5 slow queries
            'index_suggestions': self.get_index_suggestions()
        }

    def clear_stats(self) -> None:
        """Reset all accumulated query statistics."""
        self.query_stats.clear()
        logger.info("查询性能统计已清空")


# Module-level singleton shared by importers of this module.
query_optimizer = QueryOptimizer()