"""
执行统计和性能监控模块
"""

import time
import threading
import psutil
import os
from typing import Dict, List, Any, Optional, Tuple
from dataclasses import dataclass, field
from collections import defaultdict, deque
from enum import Enum

class MetricType(Enum):
    """Kinds of metrics supported by the collector."""
    COUNTER = "counter"      # monotonically increasing count
    GAUGE = "gauge"          # point-in-time value; may go up or down
    HISTOGRAM = "histogram"  # distribution of observed values
    TIMER = "timer"          # duration measurements backed by a histogram

@dataclass
class Metric:
    """A single named metric sample.

    Attributes:
        name: Metric name (without tag qualifiers).
        metric_type: One of MetricType.
        value: Current value of the metric.
        timestamp: Unix time of the last update.
        tags: Optional key/value labels attached to this metric.
    """
    name: str
    metric_type: MetricType
    value: float = 0.0
    timestamp: float = field(default_factory=time.time)
    tags: Dict[str, str] = field(default_factory=dict)
    
    def update(self, value: float, timestamp: Optional[float] = None):
        """Set a new value, stamped with `timestamp` or the current time.

        Bug fix: the previous `timestamp or time.time()` discarded a valid
        explicit timestamp of 0.0 (falsy); compare against None instead.
        """
        self.value = value
        self.timestamp = timestamp if timestamp is not None else time.time()

@dataclass
class HistogramBucket:
    """A single cumulative histogram bucket."""
    upper_bound: float  # inclusive upper bound; float('inf') for the catch-all bucket
    count: int = 0      # cumulative count of observations <= upper_bound

class Histogram:
    """Prometheus-style cumulative histogram.

    Each observation increments every bucket whose upper bound covers the
    value, so bucket counts are cumulative by construction.
    """
    
    def __init__(self, name: str, buckets: List[float] = None):
        """Create a histogram with the given (sorted) bucket upper bounds.

        Args:
            name: Histogram name.
            buckets: Optional bucket upper bounds; a default logarithmic-ish
                set ending in +inf is used when omitted.
        """
        self.name = name
        self.buckets = []
        self.total_count = 0
        self.sum_value = 0.0
        
        # Default bucket boundaries.
        if buckets is None:
            buckets = [0.1, 0.5, 1.0, 5.0, 10.0, 50.0, 100.0, 500.0, 1000.0, float('inf')]
        
        for bound in sorted(buckets):
            self.buckets.append(HistogramBucket(bound))
    
    def observe(self, value: float):
        """Record a single observation."""
        self.total_count += 1
        self.sum_value += value
        
        # Increment every bucket that covers the value (cumulative counts).
        for bucket in self.buckets:
            if value <= bucket.upper_bound:
                bucket.count += 1
    
    def quantile(self, q: float) -> float:
        """Approximate the q-th quantile (0 <= q <= 1) from bucket bounds.

        Bug fix: bucket counts are already cumulative (observe() increments
        every covering bucket), so each count must be compared directly
        against the target rank. The old code summed the cumulative counts
        again, double-counting and returning quantiles that were too low.
        """
        if self.total_count == 0:
            return 0.0
        
        target_count = self.total_count * q
        
        for bucket in self.buckets:
            if bucket.count >= target_count:
                return bucket.upper_bound
        
        return self.buckets[-1].upper_bound
    
    def mean(self) -> float:
        """Return the arithmetic mean of all observations (0.0 when empty)."""
        if self.total_count == 0:
            return 0.0
        return self.sum_value / self.total_count

class Timer:
    """Records durations into a backing histogram.

    Usable either manually (start()/stop()) or as a context manager.
    NOTE: an instance holds a single start time, so it is not safe for
    concurrent or nested timing.
    """
    
    def __init__(self, name: str):
        self.name = name
        self.histogram = Histogram(f"{name}_duration")
        self.start_time = None  # monotonic start; None while not running
    
    def start(self):
        """Begin timing.

        Bug fix: use a monotonic clock for durations; time.time() can jump
        (e.g. NTP adjustments) and produce negative or skewed intervals.
        """
        self.start_time = time.perf_counter()
    
    def stop(self) -> float:
        """Stop timing, record the duration in the histogram, and return it.

        Returns:
            Elapsed seconds, or 0.0 if the timer was never started.
        """
        if self.start_time is None:
            return 0.0
        
        duration = time.perf_counter() - self.start_time
        self.histogram.observe(duration)
        self.start_time = None
        return duration
    
    def __enter__(self):
        """Context-manager entry: starts the timer."""
        self.start()
        return self
    
    def __exit__(self, exc_type, exc_val, exc_tb):
        """Context-manager exit: stops and records, even on exception."""
        self.stop()

class MetricsCollector:
    """Thread-safe registry for counters, gauges, histograms and timers.

    All shared state is guarded by a single re-entrant lock; a bounded
    per-metric time series of (timestamp, value) samples is kept for
    range queries.
    """
    
    def __init__(self):
        self.metrics: Dict[str, Metric] = {}
        self.histograms: Dict[str, Histogram] = {}
        self.timers: Dict[str, Timer] = {}
        self.lock = threading.RLock()
        
        # Bounded time-series storage: newest 1000 samples per metric key.
        self.time_series: Dict[str, deque] = defaultdict(lambda: deque(maxlen=1000))
    
    def counter(self, name: str, value: float = 1.0, tags: Dict[str, str] = None):
        """Increment a counter metric by `value`, creating it if needed."""
        with self.lock:
            key = self._build_metric_key(name, tags)
            
            if key not in self.metrics:
                self.metrics[key] = Metric(name, MetricType.COUNTER, 0.0, tags=tags or {})
            
            # Take one timestamp so the metric and its series sample agree.
            now = time.time()
            self.metrics[key].value += value
            self.metrics[key].timestamp = now
            
            self.time_series[key].append((now, self.metrics[key].value))
    
    def gauge(self, name: str, value: float, tags: Dict[str, str] = None):
        """Set a gauge metric to `value`, creating it if needed."""
        with self.lock:
            key = self._build_metric_key(name, tags)
            
            if key not in self.metrics:
                self.metrics[key] = Metric(name, MetricType.GAUGE, tags=tags or {})
            
            now = time.time()
            self.metrics[key].value = value
            self.metrics[key].timestamp = now
            
            self.time_series[key].append((now, value))
    
    def histogram(self, name: str) -> Histogram:
        """Return the named histogram, creating it on first use."""
        with self.lock:
            if name not in self.histograms:
                self.histograms[name] = Histogram(name)
            return self.histograms[name]
    
    def timer(self, name: str) -> Timer:
        """Return the named timer, creating it on first use."""
        with self.lock:
            if name not in self.timers:
                self.timers[name] = Timer(name)
            return self.timers[name]
    
    def _build_metric_key(self, name: str, tags: Dict[str, str] = None) -> str:
        """Build the storage key: `name` or `name[k1=v1,k2=v2]` (tags sorted)."""
        if not tags:
            return name
        
        tag_str = ",".join(f"{k}={v}" for k, v in sorted(tags.items()))
        return f"{name}[{tag_str}]"
    
    def get_metric(self, name: str, tags: Dict[str, str] = None) -> Optional[Metric]:
        """Look up a metric by name/tags; returns None when absent.

        Bug fix: acquire the lock like every other accessor — the old code
        read the shared dict without synchronization.
        """
        with self.lock:
            key = self._build_metric_key(name, tags)
            return self.metrics.get(key)
    
    def get_all_metrics(self) -> Dict[str, Metric]:
        """Return a shallow copy of all registered metrics."""
        with self.lock:
            return self.metrics.copy()
    
    def get_time_series(self, name: str, tags: Dict[str, str] = None, 
                       duration_seconds: int = 300) -> List[Tuple[float, float]]:
        """Return (timestamp, value) samples newer than `duration_seconds` ago."""
        key = self._build_metric_key(name, tags)
        cutoff_time = time.time() - duration_seconds
        
        with self.lock:
            series = self.time_series.get(key, deque())
            return [(ts, value) for ts, value in series if ts > cutoff_time]
    
    def reset(self):
        """Discard all metrics, histograms, timers and time-series data."""
        with self.lock:
            self.metrics.clear()
            self.histograms.clear()
            self.timers.clear()
            self.time_series.clear()

class SystemMonitor:
    """Periodically samples process and host metrics into a MetricsCollector.

    Runs a daemon thread that collects every `monitor_interval` seconds.
    """
    
    def __init__(self, collector: MetricsCollector):
        self.collector = collector
        self.process = psutil.Process(os.getpid())
        self.monitoring = False          # public flag, kept for compatibility
        self.monitor_thread = None
        self.monitor_interval = 5.0      # seconds between samples
        # Event lets stop_monitoring() interrupt the sleep immediately
        # instead of waiting out the full interval.
        self._stop_event = threading.Event()
    
    def start_monitoring(self):
        """Start the background sampling thread (idempotent)."""
        if self.monitoring:
            return
        
        self.monitoring = True
        self._stop_event.clear()
        self.monitor_thread = threading.Thread(target=self._monitor_loop, daemon=True)
        self.monitor_thread.start()
    
    def stop_monitoring(self):
        """Signal the sampling thread to stop and wait for it (up to 10s)."""
        self.monitoring = False
        self._stop_event.set()
        if self.monitor_thread:
            self.monitor_thread.join(timeout=10.0)
    
    def _monitor_loop(self):
        """Sampling loop: collect, then wait; errors are logged and skipped."""
        while self.monitoring:
            try:
                self._collect_system_metrics()
            except Exception as e:
                # Log the error but keep monitoring.
                print(f"System monitoring error: {e}")
            # Bug fix: Event.wait() returns True immediately once stop is
            # requested, so shutdown no longer blocks for a whole interval
            # the way the old unconditional time.sleep() did.
            if self._stop_event.wait(self.monitor_interval):
                break
    
    def _collect_system_metrics(self):
        """Sample per-process and host-wide CPU/memory/thread metrics."""
        # Process CPU usage.
        cpu_percent = self.process.cpu_percent()
        self.collector.gauge("system.cpu.percent", cpu_percent)
        
        # Process memory usage.
        memory_info = self.process.memory_info()
        self.collector.gauge("system.memory.rss", memory_info.rss)
        self.collector.gauge("system.memory.vms", memory_info.vms)
        
        memory_percent = self.process.memory_percent()
        self.collector.gauge("system.memory.percent", memory_percent)
        
        # Open file descriptors (POSIX only).
        try:
            num_fds = self.process.num_fds()
            self.collector.gauge("system.fds.count", num_fds)
        except AttributeError:
            # Not supported on Windows.
            pass
        
        # Thread count.
        num_threads = self.process.num_threads()
        self.collector.gauge("system.threads.count", num_threads)
        
        # Host-wide CPU and memory.
        system_cpu = psutil.cpu_percent()
        self.collector.gauge("system.global.cpu.percent", system_cpu)
        
        system_memory = psutil.virtual_memory()
        self.collector.gauge("system.global.memory.percent", system_memory.percent)
        self.collector.gauge("system.global.memory.available", system_memory.available)

class ExecutionStatistics:
    """Aggregates query- and operator-level execution statistics.

    Wraps a MetricsCollector plus a SystemMonitor (started at construction)
    and keeps per-query / per-operator dictionaries guarded by an RLock.
    """
    
    def __init__(self):
        self.collector = MetricsCollector()
        self.system_monitor = SystemMonitor(self.collector)
        self.query_stats: Dict[str, Dict[str, Any]] = {}
        self.operator_stats: Dict[str, Dict[str, Any]] = {}
        self.lock = threading.RLock()
        
        # Begin sampling system metrics immediately.
        self.system_monitor.start_monitoring()
    
    def record_query_start(self, query_id: str, query_type: str = "unknown"):
        """Register a new running query and emit a start counter."""
        with self.lock:
            self.query_stats[query_id] = {
                "query_type": query_type,
                "start_time": time.time(),
                "end_time": None,
                "duration_ms": None,
                "rows_processed": 0,
                "rows_returned": 0,
                "operators_used": [],
                "memory_peak_mb": 0,
                "io_operations": 0,
                "status": "running"
            }
        
        self.collector.counter("query.started", tags={"type": query_type})
    
    def record_query_end(self, query_id: str, status: str = "completed", 
                        error_message: str = None):
        """Finalize a query's stats and emit completion metrics.

        Unknown query ids are silently ignored.
        """
        with self.lock:
            if query_id not in self.query_stats:
                return
            
            stats = self.query_stats[query_id]
            stats["end_time"] = time.time()
            stats["duration_ms"] = (stats["end_time"] - stats["start_time"]) * 1000
            stats["status"] = status
            
            if error_message:
                stats["error_message"] = error_message
            
            # Bug fix: snapshot values while still holding the lock. The old
            # code re-read self.query_stats[query_id] after releasing the
            # lock, racing with cleanup_old_stats() (possible KeyError).
            duration = stats["duration_ms"]
            query_type = stats["query_type"]
            rows_processed = stats["rows_processed"]
        
        self.collector.counter("query.completed", tags={"type": query_type, "status": status})
        self.collector.histogram("query.duration").observe(duration)
        self.collector.gauge("query.rows_processed", rows_processed,
                           tags={"query_id": query_id})
    
    def update_query_stats(self, query_id: str, **kwargs):
        """Merge arbitrary fields into a query's stats dict (no-op if unknown)."""
        with self.lock:
            if query_id in self.query_stats:
                self.query_stats[query_id].update(kwargs)
    
    def record_operator_execution(self, operator_id: str, operator_type: str,
                                 execution_time_ms: float, rows_processed: int,
                                 rows_returned: int, memory_used_mb: float = 0):
        """Accumulate per-operator execution stats and emit per-type metrics."""
        with self.lock:
            if operator_id not in self.operator_stats:
                self.operator_stats[operator_id] = {
                    "operator_type": operator_type,
                    "executions": 0,
                    "total_time_ms": 0,
                    "total_rows_processed": 0,
                    "total_rows_returned": 0,
                    "total_memory_used_mb": 0,
                    "avg_time_ms": 0,
                    "avg_selectivity": 0
                }
            
            stats = self.operator_stats[operator_id]
            stats["executions"] += 1
            stats["total_time_ms"] += execution_time_ms
            stats["total_rows_processed"] += rows_processed
            stats["total_rows_returned"] += rows_returned
            stats["total_memory_used_mb"] += memory_used_mb
            
            # Keep derived averages up to date.
            stats["avg_time_ms"] = stats["total_time_ms"] / stats["executions"]
            if stats["total_rows_processed"] > 0:
                stats["avg_selectivity"] = stats["total_rows_returned"] / stats["total_rows_processed"]
        
        # Metric emission only uses the call arguments, so it can safely
        # happen outside the lock.
        self.collector.histogram(f"operator.{operator_type}.duration").observe(execution_time_ms)
        self.collector.counter(f"operator.{operator_type}.executions")
        self.collector.gauge(f"operator.{operator_type}.rows_processed", rows_processed)
        self.collector.gauge(f"operator.{operator_type}.memory_used", memory_used_mb)
    
    def get_query_statistics(self, query_id: str = None) -> Dict[str, Any]:
        """Return one query's stats ({} if unknown) or a copy of all of them."""
        with self.lock:
            if query_id:
                return self.query_stats.get(query_id, {})
            else:
                return self.query_stats.copy()
    
    def get_operator_statistics(self, operator_id: str = None) -> Dict[str, Any]:
        """Return one operator's stats ({} if unknown) or a copy of all of them."""
        with self.lock:
            if operator_id:
                return self.operator_stats.get(operator_id, {})
            else:
                return self.operator_stats.copy()
    
    def get_performance_summary(self) -> Dict[str, Any]:
        """Return aggregate counts, average duration and row totals."""
        with self.lock:
            summary = {
                "total_queries": len(self.query_stats),
                "completed_queries": sum(1 for q in self.query_stats.values() 
                                       if q["status"] == "completed"),
                "failed_queries": sum(1 for q in self.query_stats.values() 
                                    if q["status"] == "failed"),
                "running_queries": sum(1 for q in self.query_stats.values() 
                                     if q["status"] == "running"),
                "total_operators": len(self.operator_stats),
                "avg_query_duration_ms": 0,
                "total_rows_processed": 0,
                "total_rows_returned": 0
            }
            
            # Average duration over completed queries with a recorded duration.
            completed_queries = [q for q in self.query_stats.values() 
                               if q["status"] == "completed" and q["duration_ms"]]
            if completed_queries:
                summary["avg_query_duration_ms"] = sum(q["duration_ms"] for q in completed_queries) / len(completed_queries)
            
            # Row totals across every query (running ones contribute 0).
            summary["total_rows_processed"] = sum(q.get("rows_processed", 0) for q in self.query_stats.values())
            summary["total_rows_returned"] = sum(q.get("rows_returned", 0) for q in self.query_stats.values())
            
            return summary
    
    def get_top_slow_queries(self, limit: int = 10) -> List[Dict[str, Any]]:
        """Return up to `limit` completed queries, slowest first."""
        with self.lock:
            completed_queries = [
                {"query_id": qid, **stats} 
                for qid, stats in self.query_stats.items()
                if stats["status"] == "completed" and stats["duration_ms"]
            ]
            
            completed_queries.sort(key=lambda q: q["duration_ms"], reverse=True)
            
            return completed_queries[:limit]
    
    def get_operator_performance(self) -> Dict[str, Dict[str, Any]]:
        """Return per-operator-type performance aggregates."""
        with self.lock:
            performance = defaultdict(lambda: {
                "total_executions": 0,
                "total_time_ms": 0,
                "avg_time_ms": 0,
                "total_rows_processed": 0,
                "avg_selectivity": 0
            })
            
            # First pass: sum totals per operator type.
            for op_id, stats in self.operator_stats.items():
                op_type = stats["operator_type"]
                perf = performance[op_type]
                
                perf["total_executions"] += stats["executions"]
                perf["total_time_ms"] += stats["total_time_ms"]
                perf["total_rows_processed"] += stats["total_rows_processed"]
            
            # Second pass: derive averages per type.
            for op_type, perf in performance.items():
                if perf["total_executions"] > 0:
                    perf["avg_time_ms"] = perf["total_time_ms"] / perf["total_executions"]
                
                ops_of_type = [s for s in self.operator_stats.values() 
                             if s["operator_type"] == op_type]
                if ops_of_type:
                    perf["avg_selectivity"] = sum(s["avg_selectivity"] for s in ops_of_type) / len(ops_of_type)
            
            return dict(performance)
    
    def export_metrics(self, format_type: str = "json") -> str:
        """Serialize all collected data; only "json" is supported.

        Raises:
            ValueError: for an unsupported format_type.
        """
        if format_type == "json":
            import json
            
            data = {
                "timestamp": time.time(),
                "metrics": {name: {
                    "type": metric.metric_type.value,
                    "value": metric.value,
                    "timestamp": metric.timestamp,
                    "tags": metric.tags
                } for name, metric in self.collector.get_all_metrics().items()},
                "histograms": {name: {
                    "buckets": [{"upper_bound": b.upper_bound, "count": b.count} 
                              for b in hist.buckets],
                    "total_count": hist.total_count,
                    "sum": hist.sum_value,
                    "mean": hist.mean()
                } for name, hist in self.collector.histograms.items()},
                "query_stats": self.query_stats,
                "operator_stats": self.operator_stats
            }
            
            return json.dumps(data, indent=2)
        
        else:
            raise ValueError(f"Unsupported export format: {format_type}")
    
    def cleanup_old_stats(self, retention_hours: int = 24):
        """Drop query stats that ended before the retention window.

        Bug fix: "end_time" exists but is None for running queries, so the
        old `stats.get("end_time", time.time())` returned None and
        `None < cutoff_time` raised TypeError. Treat None as "still
        running" (i.e. now) so running queries are always retained.
        """
        cutoff_time = time.time() - (retention_hours * 3600)
        
        with self.lock:
            old_queries = [
                qid for qid, stats in self.query_stats.items()
                if (stats.get("end_time") or time.time()) < cutoff_time
            ]
            
            for qid in old_queries:
                del self.query_stats[qid]
    
    def shutdown(self):
        """Stop the background system monitor."""
        self.system_monitor.stop_monitoring()

# Module-level singleton statistics instance (created lazily).
_global_statistics = None

def get_statistics() -> ExecutionStatistics:
    """Return the process-wide ExecutionStatistics singleton.

    Created (and its system monitor started) on first call.
    NOTE(review): lazy initialization is not locked; two threads racing
    here could each construct an instance (and monitor thread) — confirm
    first use happens from a single thread.
    """
    global _global_statistics
    if _global_statistics is None:
        _global_statistics = ExecutionStatistics()
    return _global_statistics

def set_statistics(statistics: ExecutionStatistics):
    """Replace the global statistics instance (e.g. for tests).

    The previous instance, if any, is not shut down automatically.
    """
    global _global_statistics
    _global_statistics = statistics



