"""
Performance Monitor and Metrics Collection for GSM Extensions

This module provides comprehensive performance monitoring and metrics collection
for the GSM extension system, including execution timing, resource usage tracking,
and performance analytics.
"""

import time
import psutil
import threading
from pathlib import Path
from typing import Dict, List, Any, Optional, Callable, NamedTuple, Union
from dataclasses import dataclass, field, asdict
from datetime import datetime, timedelta
from collections import defaultdict, deque
from contextlib import contextmanager
import json
import sqlite3
from threading import RLock
import functools
import statistics


class MetricType:
    """String constants naming the supported metric categories.

    Plain strings (rather than an enum) so values compare equal to the
    raw text stored in the ``metrics`` table and JSON payloads.
    """
    COUNTER = "counter"      # monotonically increasing count
    GAUGE = "gauge"          # point-in-time value; may rise or fall
    HISTOGRAM = "histogram"  # distribution of observed values
    TIMING = "timing"        # duration measurement (seconds)

@dataclass
class MetricValue:
    """One measured sample: name, value, type, time, labels and unit.

    Field names mirror the columns of the ``metrics`` database table.
    """
    name: str
    value: float
    metric_type: str
    timestamp: datetime
    labels: Dict[str, str] = field(default_factory=dict)
    unit: str = ""

    def to_dict(self) -> Dict[str, Any]:
        """Return a JSON-serializable dict (timestamp as ISO-8601 text)."""
        serialized = asdict(self)
        serialized['timestamp'] = self.timestamp.isoformat()
        # asdict() deep-copies containers; keep the original labels
        # reference so callers observe the same dict the instance holds.
        serialized['labels'] = self.labels
        return serialized


@dataclass
class PerformanceSnapshot:
    """System resource readings captured at a single instant.

    Disk and network figures are megabyte deltas computed by the caller
    (see ResourceMonitor); this class is a passive record.
    """
    timestamp: datetime
    cpu_percent: float
    memory_percent: float
    memory_used_mb: float
    disk_io_read_mb: float
    disk_io_write_mb: float
    network_sent_mb: float
    network_recv_mb: float
    active_threads: int

    def to_dict(self) -> Dict[str, Any]:
        """Return a JSON-serializable dict (timestamp as ISO-8601 text)."""
        return {**asdict(self), 'timestamp': self.timestamp.isoformat()}


@dataclass
class ExtensionMetrics:
    """Running aggregate of execution statistics for one extension."""
    name: str
    execution_count: int = 0
    total_execution_time: float = 0.0
    average_execution_time: float = 0.0
    # +inf sentinel until the first execution is recorded.
    min_execution_time: float = float('inf')
    max_execution_time: float = 0.0
    error_count: int = 0
    success_count: int = 0
    last_execution: Optional[datetime] = None
    custom_metrics: Dict[str, List[float]] = field(default_factory=dict)

    def update_timing(self, execution_time: float):
        """Fold one execution duration into the aggregate statistics."""
        self.execution_count += 1
        self.total_execution_time += execution_time
        self.average_execution_time = self.total_execution_time / self.execution_count
        if execution_time < self.min_execution_time:
            self.min_execution_time = execution_time
        if execution_time > self.max_execution_time:
            self.max_execution_time = execution_time
        self.last_execution = datetime.now()

    def add_custom_metric(self, name: str, value: float):
        """Append *value* to the named custom-metric series."""
        self.custom_metrics.setdefault(name, []).append(value)


class ResourceMonitor:
    """Samples system resources in a background daemon thread.

    Disk and network figures in each snapshot are megabyte deltas from
    the baseline counters taken at construction time, not absolute totals.
    """

    def __init__(self, sample_interval: float = 1.0):
        """
        Args:
            sample_interval: seconds to sleep between successive samples.
        """
        self.sample_interval = sample_interval
        self.monitoring = False
        self.snapshots: List[PerformanceSnapshot] = []
        self._monitor_thread: Optional[threading.Thread] = None
        self._lock = RLock()

        # Baselines for delta computation; psutil may return None for
        # these on some platforms, which _capture_snapshot tolerates.
        self._baseline_disk_io = psutil.disk_io_counters()
        self._baseline_network = psutil.net_io_counters()

    def start_monitoring(self):
        """Start sampling in a background thread (no-op if already running)."""
        with self._lock:
            if self.monitoring:
                return

            self.monitoring = True
            self.snapshots.clear()
            self._monitor_thread = threading.Thread(target=self._monitor_loop, daemon=True)
            self._monitor_thread.start()

    def stop_monitoring(self) -> List[PerformanceSnapshot]:
        """Stop sampling and return a copy of the collected snapshots."""
        with self._lock:
            if not self.monitoring:
                return self.snapshots.copy()

            self.monitoring = False
            worker = self._monitor_thread
            self._monitor_thread = None

        # BUGFIX: join OUTSIDE the lock. The monitor thread acquires the
        # same lock to append snapshots, so joining while holding it could
        # stall every stop for the full join timeout.
        if worker:
            worker.join(timeout=2.0)

        with self._lock:
            return self.snapshots.copy()

    def _monitor_loop(self):
        """Sample repeatedly until `monitoring` is cleared."""
        while self.monitoring:
            try:
                snapshot = self._capture_snapshot()
                with self._lock:
                    self.snapshots.append(snapshot)
            except Exception:
                # Best-effort: one failed sample must not kill monitoring.
                pass
            # BUGFIX: sleep even after a failure — the old `continue`
            # skipped the sleep, busy-spinning on persistent errors.
            time.sleep(self.sample_interval)

    def _capture_snapshot(self) -> PerformanceSnapshot:
        """Capture a PerformanceSnapshot of current system state."""
        # CPU and memory
        cpu_percent = psutil.cpu_percent()
        memory = psutil.virtual_memory()

        # Disk I/O delta vs. baseline (0.0 when counters are unavailable)
        disk_io = psutil.disk_io_counters()
        disk_read_mb = 0.0
        disk_write_mb = 0.0
        if disk_io and self._baseline_disk_io:
            disk_read_mb = (disk_io.read_bytes - self._baseline_disk_io.read_bytes) / (1024 * 1024)
            disk_write_mb = (disk_io.write_bytes - self._baseline_disk_io.write_bytes) / (1024 * 1024)

        # Network I/O delta vs. baseline
        network = psutil.net_io_counters()
        network_sent_mb = 0.0
        network_recv_mb = 0.0
        if network and self._baseline_network:
            network_sent_mb = (network.bytes_sent - self._baseline_network.bytes_sent) / (1024 * 1024)
            network_recv_mb = (network.bytes_recv - self._baseline_network.bytes_recv) / (1024 * 1024)

        return PerformanceSnapshot(
            timestamp=datetime.now(),
            cpu_percent=cpu_percent,
            memory_percent=memory.percent,
            memory_used_mb=memory.used / (1024 * 1024),
            disk_io_read_mb=disk_read_mb,
            disk_io_write_mb=disk_write_mb,
            network_sent_mb=network_sent_mb,
            network_recv_mb=network_recv_mb,
            active_threads=threading.active_count()
        )


class MetricsCollector:
    """In-memory collection and aggregation of performance metrics."""

    def __init__(self, max_history: int = 10000):
        self.max_history = max_history
        # One bounded deque of MetricValue samples per (name, labels) key.
        self.metrics: Dict[str, deque] = defaultdict(lambda: deque(maxlen=max_history))
        self.extension_metrics: Dict[str, ExtensionMetrics] = {}
        self._lock = RLock()

    @staticmethod
    def _series_key(name: str, labels: Optional[Dict[str, str]]) -> str:
        """Storage key for a series: metric name plus sorted label pairs."""
        label_text = ':'.join(f'{k}={v}' for k, v in sorted((labels or {}).items()))
        return f"{name}:{label_text}"

    def _extension_record(self, extension_name: str) -> ExtensionMetrics:
        """Return the per-extension record, creating it on first use.

        Caller must hold self._lock.
        """
        record = self.extension_metrics.get(extension_name)
        if record is None:
            record = ExtensionMetrics(name=extension_name)
            self.extension_metrics[extension_name] = record
        return record

    def record_metric(self, metric: MetricValue):
        """Append a metric sample to its (name, labels) series."""
        with self._lock:
            self.metrics[self._series_key(metric.name, metric.labels)].append(metric)

    def record_extension_execution(self, extension_name: str, execution_time: float, success: bool = True):
        """Record one extension execution's timing and success/failure."""
        with self._lock:
            record = self._extension_record(extension_name)
            record.update_timing(execution_time)

            if success:
                record.success_count += 1
            else:
                record.error_count += 1

    def add_custom_extension_metric(self, extension_name: str, metric_name: str, value: float):
        """Append a custom metric value for an extension."""
        with self._lock:
            self._extension_record(extension_name).add_custom_metric(metric_name, value)

    def get_metric_summary(self, metric_name: str, labels: Optional[Dict[str, str]] = None) -> Dict[str, float]:
        """Return descriptive statistics for a series ({} if no samples)."""
        with self._lock:
            series = self.metrics.get(self._series_key(metric_name, labels))
            if not series:
                return {}
            values = [sample.value for sample in series]

        return {
            'count': len(values),
            'sum': sum(values),
            'min': min(values),
            'max': max(values),
            'mean': statistics.mean(values),
            'median': statistics.median(values),
            'std_dev': statistics.stdev(values) if len(values) > 1 else 0.0
        }

    def get_extension_metrics(self, extension_name: str) -> Optional[ExtensionMetrics]:
        """Return metrics for one extension, or None if never recorded."""
        with self._lock:
            return self.extension_metrics.get(extension_name)

    def get_all_extension_metrics(self) -> Dict[str, ExtensionMetrics]:
        """Return a shallow copy of the per-extension metrics mapping."""
        with self._lock:
            return self.extension_metrics.copy()


class PerformanceDatabase:
    """Persistent SQLite storage for metrics, aggregates and snapshots."""

    def __init__(self, db_path: Optional[Path] = None):
        """
        Args:
            db_path: location of the SQLite file; defaults to
                ``performance_metrics.db`` in the working directory.
        """
        self.db_path = db_path or Path("performance_metrics.db")
        self._init_database()

    def _init_database(self):
        """Create tables and indexes if absent (idempotent)."""
        with sqlite3.connect(self.db_path) as conn:
            # Raw metric samples (labels serialized as JSON text).
            conn.execute("""
                CREATE TABLE IF NOT EXISTS metrics (
                    id INTEGER PRIMARY KEY AUTOINCREMENT,
                    name TEXT NOT NULL,
                    value REAL NOT NULL,
                    metric_type TEXT NOT NULL,
                    timestamp TEXT NOT NULL,
                    labels TEXT,
                    unit TEXT
                )
            """)

            # One aggregate row per extension, updated in place.
            conn.execute("""
                CREATE TABLE IF NOT EXISTS extension_metrics (
                    id INTEGER PRIMARY KEY AUTOINCREMENT,
                    name TEXT NOT NULL,
                    execution_count INTEGER NOT NULL,
                    total_execution_time REAL NOT NULL,
                    average_execution_time REAL NOT NULL,
                    min_execution_time REAL NOT NULL,
                    max_execution_time REAL NOT NULL,
                    error_count INTEGER NOT NULL,
                    success_count INTEGER NOT NULL,
                    last_execution TEXT,
                    custom_metrics TEXT,
                    updated_at TEXT NOT NULL
                )
            """)

            # System resource samples captured around monitored executions.
            conn.execute("""
                CREATE TABLE IF NOT EXISTS performance_snapshots (
                    id INTEGER PRIMARY KEY AUTOINCREMENT,
                    timestamp TEXT NOT NULL,
                    cpu_percent REAL NOT NULL,
                    memory_percent REAL NOT NULL,
                    memory_used_mb REAL NOT NULL,
                    disk_io_read_mb REAL NOT NULL,
                    disk_io_write_mb REAL NOT NULL,
                    network_sent_mb REAL NOT NULL,
                    network_recv_mb REAL NOT NULL,
                    active_threads INTEGER NOT NULL,
                    context TEXT
                )
            """)

            conn.execute("CREATE INDEX IF NOT EXISTS idx_metrics_name_timestamp ON metrics(name, timestamp)")
            conn.execute("CREATE INDEX IF NOT EXISTS idx_extension_metrics_name ON extension_metrics(name)")
            conn.execute("CREATE INDEX IF NOT EXISTS idx_snapshots_timestamp ON performance_snapshots(timestamp)")

    def store_metric(self, metric: MetricValue):
        """Insert a single metric sample row."""
        with sqlite3.connect(self.db_path) as conn:
            conn.execute("""
                INSERT INTO metrics (name, value, metric_type, timestamp, labels, unit)
                VALUES (?, ?, ?, ?, ?, ?)
            """, (
                metric.name,
                metric.value,
                metric.metric_type,
                metric.timestamp.isoformat(),
                json.dumps(metric.labels),
                metric.unit
            ))

    def store_extension_metrics(self, metrics: ExtensionMetrics):
        """Insert or update the aggregate row for one extension.

        NOTE(review): the SELECT-then-INSERT/UPDATE below is not atomic
        across processes, and `name` has no UNIQUE constraint, so a true
        UPSERT would need a schema migration — confirm single-writer use.
        """
        with sqlite3.connect(self.db_path) as conn:
            # Check if metrics already exist for this extension name.
            cursor = conn.execute("SELECT id FROM extension_metrics WHERE name = ?", (metrics.name,))
            existing = cursor.fetchone()

            if existing:
                # Update existing record
                conn.execute("""
                    UPDATE extension_metrics 
                    SET execution_count = ?, total_execution_time = ?, average_execution_time = ?,
                        min_execution_time = ?, max_execution_time = ?, error_count = ?,
                        success_count = ?, last_execution = ?, custom_metrics = ?, updated_at = ?
                    WHERE name = ?
                """, (
                    metrics.execution_count,
                    metrics.total_execution_time,
                    metrics.average_execution_time,
                    metrics.min_execution_time,
                    metrics.max_execution_time,
                    metrics.error_count,
                    metrics.success_count,
                    metrics.last_execution.isoformat() if metrics.last_execution else None,
                    json.dumps(metrics.custom_metrics),
                    datetime.now().isoformat(),
                    metrics.name
                ))
            else:
                # Insert new record
                conn.execute("""
                    INSERT INTO extension_metrics 
                    (name, execution_count, total_execution_time, average_execution_time,
                     min_execution_time, max_execution_time, error_count, success_count,
                     last_execution, custom_metrics, updated_at)
                    VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
                """, (
                    metrics.name,
                    metrics.execution_count,
                    metrics.total_execution_time,
                    metrics.average_execution_time,
                    metrics.min_execution_time,
                    metrics.max_execution_time,
                    metrics.error_count,
                    metrics.success_count,
                    metrics.last_execution.isoformat() if metrics.last_execution else None,
                    json.dumps(metrics.custom_metrics),
                    datetime.now().isoformat()
                ))

    def store_performance_snapshots(self, snapshots: List[PerformanceSnapshot], context: str = ""):
        """Bulk-insert performance snapshots tagged with *context*."""
        if not snapshots:
            return

        # Build all rows first, then executemany: one C-level loop inside
        # a single implicit transaction instead of one execute per row.
        rows = [
            (
                snapshot.timestamp.isoformat(),
                snapshot.cpu_percent,
                snapshot.memory_percent,
                snapshot.memory_used_mb,
                snapshot.disk_io_read_mb,
                snapshot.disk_io_write_mb,
                snapshot.network_sent_mb,
                snapshot.network_recv_mb,
                snapshot.active_threads,
                context
            )
            for snapshot in snapshots
        ]

        with sqlite3.connect(self.db_path) as conn:
            conn.executemany("""
                INSERT INTO performance_snapshots 
                (timestamp, cpu_percent, memory_percent, memory_used_mb,
                 disk_io_read_mb, disk_io_write_mb, network_sent_mb, network_recv_mb,
                 active_threads, context)
                VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
            """, rows)


class PerformanceMonitor:
    """Main performance monitoring system for GSM extensions.

    Combines a MetricsCollector (in-memory aggregation), a
    PerformanceDatabase (persistence) and an optional ResourceMonitor
    (system resource sampling around monitored executions).
    """

    def __init__(self, db_path: Optional[Path] = None, enable_resource_monitoring: bool = True):
        """
        Args:
            db_path: SQLite file for persisted metrics; None uses the
                PerformanceDatabase default path.
            enable_resource_monitoring: when True, resource snapshots are
                captured during each monitored execution.
        """
        self.db = PerformanceDatabase(db_path)
        self.collector = MetricsCollector()
        self.resource_monitor = ResourceMonitor() if enable_resource_monitoring else None
        # Maps "extension:operation" -> start time of in-flight executions.
        self._active_contexts: Dict[str, float] = {}
        self._lock = RLock()

    @contextmanager
    def monitor_execution(self, extension_name: str, operation: str = "execution"):
        """Context manager that times the wrapped code and records metrics.

        Records a timing metric labeled with extension, operation and
        success/failure; exceptions are recorded as failures and re-raised.
        When enabled, resource snapshots captured during the execution are
        persisted under the "extension:operation" context key.

        NOTE(review): a single shared ResourceMonitor means overlapping
        monitored executions would interleave snapshots — confirm callers
        do not nest or parallelize these contexts.
        """
        start_time = time.time()
        context_key = f"{extension_name}:{operation}"

        # Start resource monitoring if enabled
        if self.resource_monitor:
            self.resource_monitor.start_monitoring()

        try:
            with self._lock:
                self._active_contexts[context_key] = start_time

            yield

            # Execution successful
            execution_time = time.time() - start_time
            self._record_execution_metrics(extension_name, operation, execution_time, True)

        except Exception:
            # Execution failed: record it, then let the exception propagate.
            # (Was `except Exception as e` with `e` never used.)
            execution_time = time.time() - start_time
            self._record_execution_metrics(extension_name, operation, execution_time, False)
            raise

        finally:
            with self._lock:
                self._active_contexts.pop(context_key, None)

            # Stop resource monitoring and store snapshots
            if self.resource_monitor:
                snapshots = self.resource_monitor.stop_monitoring()
                if snapshots:
                    self.db.store_performance_snapshots(snapshots, context_key)

    def _record_execution_metrics(self, extension_name: str, operation: str, 
                                execution_time: float, success: bool):
        """Record one execution in the collector and persist to the DB."""
        # Record in collector
        self.collector.record_extension_execution(extension_name, execution_time, success)

        # Create timing metric (fixed: was a placeholder-free f-string)
        timing_metric = MetricValue(
            name="extension_execution_time",
            value=execution_time,
            metric_type=MetricType.TIMING,
            timestamp=datetime.now(),
            labels={
                'extension': extension_name,
                'operation': operation,
                'success': str(success)
            },
            unit="seconds"
        )

        self.collector.record_metric(timing_metric)
        self.db.store_metric(timing_metric)

        # Persist the updated per-extension aggregates as well
        extension_metrics = self.collector.get_extension_metrics(extension_name)
        if extension_metrics:
            self.db.store_extension_metrics(extension_metrics)

    def record_custom_metric(self, extension_name: str, metric_name: str, 
                           value: float, labels: Optional[Dict[str, str]] = None):
        """Record a custom gauge metric for an extension.

        The extension name is merged into the labels; it overrides any
        caller-supplied 'extension' label.
        """
        # Add to collector
        self.collector.add_custom_extension_metric(extension_name, metric_name, value)

        # Create and store metric
        metric = MetricValue(
            name=metric_name,
            value=value,
            metric_type=MetricType.GAUGE,
            timestamp=datetime.now(),
            labels=dict(labels or {}, extension=extension_name),
            unit=""
        )

        self.collector.record_metric(metric)
        self.db.store_metric(metric)

    def get_performance_summary(self) -> Dict[str, Any]:
        """Build a JSON-serializable summary of all extension metrics.

        Returns:
            Dict with overall totals plus a per-extension breakdown:
            success rate (percentage), timing stats and custom-metric
            aggregates.
        """
        extension_metrics = self.collector.get_all_extension_metrics()

        summary = {
            'total_extensions': len(extension_metrics),
            'total_executions': sum(m.execution_count for m in extension_metrics.values()),
            'total_errors': sum(m.error_count for m in extension_metrics.values()),
            'extensions': {}
        }

        for name, metrics in extension_metrics.items():
            summary['extensions'][name] = {
                'execution_count': metrics.execution_count,
                'success_rate': (metrics.success_count / metrics.execution_count * 100 
                               if metrics.execution_count > 0 else 0),
                'average_execution_time': metrics.average_execution_time,
                # min starts at +inf before any execution; report 0 instead.
                'min_execution_time': metrics.min_execution_time if metrics.min_execution_time != float('inf') else 0,
                'max_execution_time': metrics.max_execution_time,
                'last_execution': metrics.last_execution.isoformat() if metrics.last_execution else None,
                'custom_metrics': {
                    # `metric_name` — previously shadowed the outer `name`.
                    metric_name: {
                        'count': len(values),
                        'average': sum(values) / len(values) if values else 0,
                        'min': min(values) if values else 0,
                        'max': max(values) if values else 0
                    }
                    for metric_name, values in metrics.custom_metrics.items()
                }
            }

        return summary


def performance_monitor(extension_name: str, operation: str = "execution"):
    """Decorator that times calls through a monitor attached to the wrapper.

    The monitor is looked up on the wrapper itself (``wrapper._monitor``);
    when none has been attached, the wrapped function simply runs
    unmonitored.
    """
    def decorator(func: Callable) -> Callable:
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            monitor = getattr(wrapper, '_monitor', None)
            if monitor:
                with monitor.monitor_execution(extension_name, operation):
                    return func(*args, **kwargs)
            # No monitor attached: call through unmonitored.
            return func(*args, **kwargs)

        return wrapper
    return decorator


# Process-wide monitor instance: created lazily by get_performance_monitor()
# and replaceable via set_performance_monitor().
_global_monitor: Optional[PerformanceMonitor] = None


def get_performance_monitor() -> PerformanceMonitor:
    """Return the process-wide monitor, creating it on first call."""
    global _global_monitor
    monitor = _global_monitor
    if monitor is None:
        monitor = PerformanceMonitor()
        _global_monitor = monitor
    return monitor


def set_performance_monitor(monitor: PerformanceMonitor):
    """Replace the process-wide monitor instance (useful in tests)."""
    global _global_monitor
    _global_monitor = monitor


if __name__ == "__main__":
    # Smoke test: time a fake extension run, attach a custom metric,
    # then print the aggregated summary as JSON.
    demo_monitor = PerformanceMonitor()

    with demo_monitor.monitor_execution("example_extension", "build"):
        time.sleep(0.1)  # stand-in for real extension work

    demo_monitor.record_custom_metric("example_extension", "memory_usage_mb", 128.5)

    print(json.dumps(demo_monitor.get_performance_summary(), indent=2))