use codegraph_analytics::{AdvancedAnalyzer, CodeMetrics};
use codegraph_core::{Result as CoreResult, CodeGraphError};
use serde::{Deserialize, Serialize};
use std::collections::HashMap;
use std::sync::Arc;
use tokio::sync::RwLock;
use tracing::{debug, error, info, warn};
use uuid::Uuid;

/// Request body for fetching current code metrics.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct MetricsRequest {
    /// Files to analyze; `None` (or empty) returns all cached metrics instead.
    pub file_paths: Option<Vec<String>>,
    /// Optional time window to restrict results to (filtering not yet implemented).
    pub time_range: Option<TimeRange>,
    /// Optional list of metric names to include (filtering not yet implemented).
    pub metrics: Option<Vec<String>>,
}

/// Response envelope for a metrics query.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct MetricsResponse {
    /// One entry per analyzed file / cached snapshot.
    pub data: Vec<MetricsData>,
    /// Number of entries in `data`.
    pub total_count: usize,
    /// Whether more pages exist; pagination is not implemented, so always `false`.
    pub has_more: bool,
}

/// Full metrics snapshot produced by one analysis run.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct MetricsData {
    pub complexity: ComplexityMetrics,
    pub quality: QualityMetrics,
    pub maintainability: MaintainabilityMetrics,
    pub performance: PerformanceMetrics,
    /// When the analysis ran, RFC 3339 formatted.
    pub timestamp: String,
    /// File the metrics belong to; `None` for non-file-scoped entries.
    pub file_path: Option<String>,
}

/// Complexity measurements for a whole file, plus per-function breakdowns.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ComplexityMetrics {
    pub cyclomatic_complexity: f64,
    pub cognitive_complexity: f64,
    pub halstead_complexity: HalsteadMetrics,
    pub max_nesting_depth: f64,
    pub avg_nesting_depth: f64,
    /// Per-function complexity entries for the analyzed file.
    pub function_complexity: Vec<FunctionComplexityMetrics>,
}

/// Halstead software-science measures.
///
/// Field names follow the standard Halstead definitions (vocabulary, length,
/// volume, difficulty, effort, estimated time and bugs); the values are
/// computed upstream by the analytics crate and copied here verbatim.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct HalsteadMetrics {
    /// n1: number of distinct operators.
    pub distinct_operators: usize,
    /// n2: number of distinct operands.
    pub distinct_operands: usize,
    /// N1: total occurrences of operators.
    pub total_operators: usize,
    /// N2: total occurrences of operands.
    pub total_operands: usize,
    pub vocabulary: usize,
    pub length: usize,
    pub calculated_length: f64,
    pub volume: f64,
    pub difficulty: f64,
    pub effort: f64,
    pub time: f64,
    pub bugs: f64,
}

/// Complexity measurements for a single function within a file.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct FunctionComplexityMetrics {
    pub name: String,
    pub cyclomatic_complexity: f64,
    pub cognitive_complexity: f64,
    pub lines_of_code: usize,
    /// First source line of the function.
    pub start_line: usize,
    /// Last source line of the function.
    pub end_line: usize,
}

/// Code-quality indicators (coverage, duplication, issue counts).
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct QualityMetrics {
    /// Percentage of duplicated code.
    pub duplication_percentage: f64,
    pub test_coverage: f64,
    pub technical_debt_ratio: f64,
    /// Number of detected code smells.
    pub code_smells: usize,
    pub security_issues: usize,
    pub performance_issues: usize,
    pub documentation_coverage: f64,
}

/// Maintainability indicators (index, coupling/cohesion, change impact).
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct MaintainabilityMetrics {
    pub maintainability_index: f64,
    pub coupling: f64,
    pub cohesion: f64,
    pub inheritance_depth: f64,
    pub responsibility_assignment: f64,
    pub change_impact: f64,
}

/// Resource usage of the analysis run itself (not of the analyzed code).
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct PerformanceMetrics {
    /// Wall-clock time spent analyzing, in milliseconds.
    pub analysis_time_ms: u64,
    pub memory_usage_bytes: u64,
    pub cpu_usage_percent: f64,
    pub io_operations: u64,
    /// Fraction of lookups served from cache (0.0–1.0 presumed — confirm upstream).
    pub cache_hit_ratio: f64,
}

/// Inclusive time window for queries.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct TimeRange {
    /// Window start; timestamps elsewhere in this API are RFC 3339 strings.
    pub start: String,
    /// Window end.
    pub end: String,
    /// Optional bucketing interval (e.g. "1h") — semantics defined by the caller.
    pub interval: Option<String>,
}

/// Request for a time series of a single metric.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct HistoricalMetricsRequest {
    /// Restrict to one file; `None` for all files.
    pub file_path: Option<String>,
    /// Name of the metric to chart.
    pub metric: String,
    pub time_range: TimeRange,
    /// Bucketing interval for the series.
    pub interval: String,
}

/// Time-series response echoing the requested metric and range.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct HistoricalMetricsResponse {
    /// Ordered data points for the series.
    pub data: Vec<HistoricalDataPoint>,
    /// Metric name, echoed from the request.
    pub metric: String,
    /// Time range, echoed from the request.
    pub time_range: TimeRange,
}

/// One sample in a metric time series.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct HistoricalDataPoint {
    /// Sample time, RFC 3339 formatted.
    pub timestamp: String,
    pub value: f64,
    /// Free-form extra context attached to the sample.
    pub metadata: Option<HashMap<String, serde_json::Value>>,
}

/// Persisted definition of a dashboard: layout, widgets and display options.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct DashboardConfig {
    /// Unique id; also the storage key for save/get/delete.
    pub id: String,
    pub name: String,
    pub layout: DashboardLayout,
    pub widgets: Vec<DashboardWidget>,
    pub theme: String,
    /// Client refresh interval (units defined by the frontend — likely seconds; confirm).
    pub refresh_interval: u64,
    pub created_at: String,
    pub updated_at: String,
}

/// Grid geometry for a dashboard.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct DashboardLayout {
    pub columns: u32,
    pub rows: u32,
    /// Spacing between grid cells.
    pub gap: u32,
}

/// A single widget placed on a dashboard grid.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct DashboardWidget {
    pub id: String,
    /// Widget kind as a free-form string (e.g. chart type) — interpreted by the frontend.
    pub widget_type: String,
    pub title: String,
    pub position: WidgetPosition,
    pub size: WidgetSize,
    /// Widget-specific settings, opaque to the backend.
    pub config: HashMap<String, serde_json::Value>,
    /// Where the widget pulls its values from.
    pub data_source: DataSourceConfig,
}

/// Grid coordinates (column, row) of a widget's top-left corner.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct WidgetPosition {
    pub x: u32,
    pub y: u32,
}

/// Widget extent in grid cells.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct WidgetSize {
    pub width: u32,
    pub height: u32,
}

/// Describes which metric feeds a widget and how to aggregate/filter it.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct DataSourceConfig {
    /// Metric name to query.
    pub metric: String,
    /// Optional aggregation function name (e.g. "avg") — not validated here.
    pub aggregation: Option<String>,
    pub time_range: Option<TimeRange>,
    /// Free-form filter clauses, opaque to the backend.
    pub filters: Option<HashMap<String, serde_json::Value>>,
}

/// A threshold rule that raises an [`Alert`] when its condition is met.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct AlertRule {
    /// Unique id; also the storage key for save/get/delete.
    pub id: String,
    pub name: String,
    /// Metric the rule watches.
    pub metric: String,
    pub condition: AlertCondition,
    pub threshold: f64,
    /// Severity label as a free-form string — values not enforced here.
    pub severity: String,
    pub enabled: bool,
    /// Destinations notified when the rule fires.
    pub notifications: Vec<NotificationConfig>,
    pub created_at: String,
    pub updated_at: String,
}

/// Comparison applied between a metric value and the rule threshold.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct AlertCondition {
    /// Comparison operator as a string (e.g. ">", "<") — not validated here.
    pub operator: String,
    /// Optional minimum duration the condition must hold before firing.
    pub duration: Option<u64>,
}

/// One notification destination for a firing alert rule.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct NotificationConfig {
    /// Channel kind as a free-form string (e.g. email, webhook) — confirm supported values.
    pub notification_type: String,
    /// Channel-specific target (address, URL, …).
    pub target: String,
    /// Optional message template override.
    pub template: Option<String>,
}

/// A fired instance of an [`AlertRule`].
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Alert {
    pub id: String,
    /// Id of the rule that produced this alert.
    pub rule_id: String,
    pub rule_name: String,
    pub metric: String,
    /// Observed metric value that triggered the alert.
    pub value: f64,
    /// Threshold from the rule at trigger time.
    pub threshold: f64,
    pub severity: String,
    /// Lifecycle state; `acknowledge_alert` sets this to "resolved".
    pub status: String,
    /// Trigger time, RFC 3339 formatted.
    pub triggered_at: String,
    /// Resolution time (RFC 3339), set when the alert is acknowledged/resolved.
    pub resolved_at: Option<String>,
    pub message: String,
}

/// API handlers for dashboard functionality.
///
/// All state is held in memory behind `Arc<RwLock<..>>`; nothing here is
/// persisted, so configs, rules, alerts and cached metrics are lost on restart.
pub struct ApiHandlers {
    /// Shared analyzer; write-locked for the duration of each analysis.
    analyzer: Arc<RwLock<AdvancedAnalyzer>>,
    /// Per-file cache of converted metrics, keyed by file path.
    metrics_cache: Arc<RwLock<HashMap<String, MetricsData>>>,
    /// Saved dashboard configurations, keyed by config id.
    dashboard_configs: Arc<RwLock<HashMap<String, DashboardConfig>>>,
    /// Alert rules, keyed by rule id.
    alert_rules: Arc<RwLock<HashMap<String, AlertRule>>>,
    /// Currently active alerts, keyed by alert id.
    active_alerts: Arc<RwLock<HashMap<String, Alert>>>,
}

impl ApiHandlers {
    pub fn new(analyzer: AdvancedAnalyzer) -> Self {
        Self {
            analyzer: Arc::new(RwLock::new(analyzer)),
            metrics_cache: Arc::new(RwLock::new(HashMap::new())),
            dashboard_configs: Arc::new(RwLock::new(HashMap::new())),
            alert_rules: Arc::new(RwLock::new(HashMap::new())),
            active_alerts: Arc::new(RwLock::new(HashMap::new())),
        }
    }

    /// Get current metrics for specified files
    pub async fn get_metrics(&self, request: MetricsRequest) -> CoreResult<MetricsResponse> {
        debug!("Getting metrics for request: {:?}", request);

        let file_paths = request.file_paths.unwrap_or_default();
        let mut metrics_data = Vec::new();

        // If no specific files requested, return cached metrics
        if file_paths.is_empty() {
            let cache = self.metrics_cache.read().await;
            metrics_data = cache.values().cloned().collect();
        } else {
            // Analyze specific files
            for file_path in &file_paths {
                match self.analyze_file_metrics(file_path).await {
                    Ok(metrics) => metrics_data.push(metrics),
                    Err(e) => {
                        warn!("Failed to analyze file {}: {}", file_path, e);
                    }
                }
            }
        }

        // Apply time range filter if specified
        if let Some(time_range) = &request.time_range {
            metrics_data = self.filter_by_time_range(metrics_data, time_range).await?;
        }

        // Apply metric filter if specified
        if let Some(metrics_filter) = &request.metrics {
            metrics_data = self.filter_by_metrics(metrics_data, metrics_filter).await?;
        }

        let total_count = metrics_data.len();
        let has_more = false; // TODO: Implement pagination

        Ok(MetricsResponse {
            data: metrics_data,
            total_count,
            has_more,
        })
    }

    /// Get historical metrics data
    pub async fn get_historical_metrics(
        &self,
        request: HistoricalMetricsRequest,
    ) -> CoreResult<HistoricalMetricsResponse> {
        debug!("Getting historical metrics: {:?}", request);

        // TODO: Implement historical data retrieval from storage
        // For now, return mock data
        let data = vec![
            HistoricalDataPoint {
                timestamp: "2024-01-01T00:00:00Z".to_string(),
                value: 10.0,
                metadata: None,
            },
            HistoricalDataPoint {
                timestamp: "2024-01-02T00:00:00Z".to_string(),
                value: 12.0,
                metadata: None,
            },
        ];

        Ok(HistoricalMetricsResponse {
            data,
            metric: request.metric,
            time_range: request.time_range,
        })
    }

    /// Create or update dashboard configuration
    pub async fn save_dashboard_config(&self, config: DashboardConfig) -> CoreResult<DashboardConfig> {
        debug!("Saving dashboard config: {}", config.id);

        let mut configs = self.dashboard_configs.write().await;
        configs.insert(config.id.clone(), config.clone());

        info!("Dashboard config saved: {}", config.id);
        Ok(config)
    }

    /// Get dashboard configuration by ID
    pub async fn get_dashboard_config(&self, id: &str) -> CoreResult<Option<DashboardConfig>> {
        debug!("Getting dashboard config: {}", id);

        let configs = self.dashboard_configs.read().await;
        Ok(configs.get(id).cloned())
    }

    /// List all dashboard configurations
    pub async fn list_dashboard_configs(&self) -> CoreResult<Vec<DashboardConfig>> {
        debug!("Listing dashboard configs");

        let configs = self.dashboard_configs.read().await;
        Ok(configs.values().cloned().collect())
    }

    /// Delete dashboard configuration
    pub async fn delete_dashboard_config(&self, id: &str) -> CoreResult<bool> {
        debug!("Deleting dashboard config: {}", id);

        let mut configs = self.dashboard_configs.write().await;
        let removed = configs.remove(id).is_some();

        if removed {
            info!("Dashboard config deleted: {}", id);
        }

        Ok(removed)
    }

    /// Create or update alert rule
    pub async fn save_alert_rule(&self, rule: AlertRule) -> CoreResult<AlertRule> {
        debug!("Saving alert rule: {}", rule.id);

        let mut rules = self.alert_rules.write().await;
        rules.insert(rule.id.clone(), rule.clone());

        info!("Alert rule saved: {}", rule.id);
        Ok(rule)
    }

    /// Get alert rule by ID
    pub async fn get_alert_rule(&self, id: &str) -> CoreResult<Option<AlertRule>> {
        debug!("Getting alert rule: {}", id);

        let rules = self.alert_rules.read().await;
        Ok(rules.get(id).cloned())
    }

    /// List all alert rules
    pub async fn list_alert_rules(&self) -> CoreResult<Vec<AlertRule>> {
        debug!("Listing alert rules");

        let rules = self.alert_rules.read().await;
        Ok(rules.values().cloned().collect())
    }

    /// Delete alert rule
    pub async fn delete_alert_rule(&self, id: &str) -> CoreResult<bool> {
        debug!("Deleting alert rule: {}", id);

        let mut rules = self.alert_rules.write().await;
        let removed = rules.remove(id).is_some();

        if removed {
            info!("Alert rule deleted: {}", id);
        }

        Ok(removed)
    }

    /// Get active alerts
    pub async fn get_active_alerts(&self) -> CoreResult<Vec<Alert>> {
        debug!("Getting active alerts");

        let alerts = self.active_alerts.read().await;
        Ok(alerts.values().cloned().collect())
    }

    /// Acknowledge alert
    pub async fn acknowledge_alert(&self, id: &str) -> CoreResult<bool> {
        debug!("Acknowledging alert: {}", id);

        let mut alerts = self.active_alerts.write().await;
        if let Some(alert) = alerts.get_mut(id) {
            alert.status = "resolved".to_string();
            alert.resolved_at = Some(chrono::Utc::now().to_rfc3339());
            info!("Alert acknowledged: {}", id);
            Ok(true)
        } else {
            Ok(false)
        }
    }

    // Private helper methods

    /// Analyze metrics for a specific file
    async fn analyze_file_metrics(&self, file_path: &str) -> CoreResult<MetricsData> {
        debug!("Analyzing metrics for file: {}", file_path);

        // Check cache first
        {
            let cache = self.metrics_cache.read().await;
            if let Some(cached_metrics) = cache.get(file_path) {
                return Ok(cached_metrics.clone());
            }
        }

        // Read file content
        let content = tokio::fs::read_to_string(file_path).await
            .map_err(|e| CodeGraphError::Io(e))?;

        // Analyze with the analyzer
        let mut analyzer = self.analyzer.write().await;
        let analysis_result = analyzer.analyze_file(file_path.as_ref(), &content).await?;

        // Convert to API format
        let metrics_data = self.convert_metrics_to_api_format(analysis_result, file_path).await?;

        // Cache the result
        {
            let mut cache = self.metrics_cache.write().await;
            cache.insert(file_path.to_string(), metrics_data.clone());
        }

        Ok(metrics_data)
    }

    /// Convert internal metrics format to API format
    async fn convert_metrics_to_api_format(
        &self,
        metrics: CodeMetrics,
        file_path: &str,
    ) -> CoreResult<MetricsData> {
        let complexity = ComplexityMetrics {
            cyclomatic_complexity: metrics.complexity.cyclomatic_complexity,
            cognitive_complexity: metrics.complexity.cognitive_complexity,
            halstead_complexity: HalsteadMetrics {
                distinct_operators: metrics.complexity.halstead_complexity.distinct_operators,
                distinct_operands: metrics.complexity.halstead_complexity.distinct_operands,
                total_operators: metrics.complexity.halstead_complexity.total_operators,
                total_operands: metrics.complexity.halstead_complexity.total_operands,
                vocabulary: metrics.complexity.halstead_complexity.vocabulary,
                length: metrics.complexity.halstead_complexity.length,
                calculated_length: metrics.complexity.halstead_complexity.calculated_length,
                volume: metrics.complexity.halstead_complexity.volume,
                difficulty: metrics.complexity.halstead_complexity.difficulty,
                effort: metrics.complexity.halstead_complexity.effort,
                time: metrics.complexity.halstead_complexity.time,
                bugs: metrics.complexity.halstead_complexity.bugs,
            },
            max_nesting_depth: metrics.complexity.max_nesting_depth,
            avg_nesting_depth: metrics.complexity.avg_nesting_depth,
            function_complexity: metrics.complexity.function_complexity
                .into_iter()
                .map(|f| FunctionComplexityMetrics {
                    name: f.name,
                    cyclomatic_complexity: f.cyclomatic_complexity,
                    cognitive_complexity: f.cognitive_complexity,
                    lines_of_code: f.lines_of_code,
                    start_line: f.start_line,
                    end_line: f.end_line,
                })
                .collect(),
        };

        let quality = QualityMetrics {
            duplication_percentage: metrics.quality.duplication_percentage,
            test_coverage: metrics.quality.test_coverage,
            technical_debt_ratio: metrics.quality.technical_debt_ratio,
            code_smells: metrics.quality.code_smells,
            security_issues: metrics.quality.security_issues,
            performance_issues: metrics.quality.performance_issues,
            documentation_coverage: metrics.quality.documentation_coverage,
        };

        let maintainability = MaintainabilityMetrics {
            maintainability_index: metrics.maintainability.maintainability_index,
            coupling: metrics.maintainability.coupling,
            cohesion: metrics.maintainability.cohesion,
            inheritance_depth: metrics.maintainability.inheritance_depth,
            responsibility_assignment: metrics.maintainability.responsibility_assignment,
            change_impact: metrics.maintainability.change_impact,
        };

        let performance = PerformanceMetrics {
            analysis_time_ms: metrics.performance.analysis_time_ms,
            memory_usage_bytes: metrics.performance.memory_usage_bytes,
            cpu_usage_percent: metrics.performance.cpu_usage_percent,
            io_operations: metrics.performance.io_operations,
            cache_hit_ratio: metrics.performance.cache_hit_ratio,
        };

        Ok(MetricsData {
            complexity,
            quality,
            maintainability,
            performance,
            timestamp: metrics.timestamp.to_rfc3339(),
            file_path: Some(file_path.to_string()),
        })
    }

    /// Filter metrics by time range
    async fn filter_by_time_range(
        &self,
        metrics: Vec<MetricsData>,
        time_range: &TimeRange,
    ) -> CoreResult<Vec<MetricsData>> {
        // TODO: Implement time range filtering
        // For now, return all metrics
        Ok(metrics)
    }

    /// Filter metrics by specific metric types
    async fn filter_by_metrics(
        &self,
        metrics: Vec<MetricsData>,
        _metrics_filter: &[String],
    ) -> CoreResult<Vec<MetricsData>> {
        // TODO: Implement metric filtering
        // For now, return all metrics
        Ok(metrics)
    }
}

impl Default for ApiHandlers {
    /// Handler set backed by an analyzer built from default settings;
    /// primarily useful in tests.
    fn default() -> Self {
        Self::new(AdvancedAnalyzer::new(Default::default()))
    }
}
