use crate::{CodeMetrics, DateTime, Utc, Serialize, Deserialize};
use std::collections::{HashMap, VecDeque};
use statrs::statistics::{Data, Distribution, Min, Max};
use tracing::{debug};

/// Trend analyzer for tracking metrics over time.
///
/// Maintains a bounded, chronologically ordered history of metric snapshots
/// and derives per-metric trends, anomalies, forecasts and recommendations
/// from it.
pub struct TrendAnalyzer {
    /// Historical metrics data (oldest at the front, newest at the back)
    history: VecDeque<TimestampedMetrics>,
    /// Maximum history size; the oldest entries are evicted beyond this
    max_history_size: usize,
    /// Trend calculation configuration
    config: TrendConfig,
}

/// Configuration for trend analysis
#[derive(Debug, Clone)]
pub struct TrendConfig {
    /// Window size (number of most recent samples) used for the moving average
    pub window_size: usize,
    /// Minimum data points required before trend analysis will run
    pub min_data_points: usize,
    /// Smoothing factor (alpha) for the exponential moving average;
    /// expected in (0, 1] — larger values weight recent samples more heavily
    pub smoothing_factor: f64,
    /// Enable anomaly detection
    pub enable_anomaly_detection: bool,
    /// Anomaly detection threshold, expressed in standard deviations (z-score)
    pub anomaly_threshold: f64,
}

impl Default for TrendConfig {
    fn default() -> Self {
        Self {
            window_size: 30,
            min_data_points: 5,
            smoothing_factor: 0.2,
            enable_anomaly_detection: true,
            anomaly_threshold: 2.0,
        }
    }
}

/// A metrics snapshot paired with the moment it was recorded.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct TimestampedMetrics {
    /// The metrics data
    pub metrics: CodeMetrics,
    /// When the metrics were recorded (UTC)
    pub timestamp: DateTime<Utc>,
    /// Optional free-form metadata supplied by the caller; empty when none given
    pub metadata: HashMap<String, String>,
}

/// Trend analysis result for a single metric series.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct TrendAnalysis {
    /// Metric name
    pub metric_name: String,
    /// Most recent value in the series
    pub current_value: f64,
    /// Second most recent value, when at least two samples exist
    pub previous_value: Option<f64>,
    /// Trend direction
    pub trend: TrendDirection,
    /// Trend strength (0.0 to 1.0; the absolute regression slope, capped at 1.0)
    pub trend_strength: f64,
    /// Rate of change versus the previous value, in percent
    pub rate_of_change: f64,
    /// Simple moving average over the configured trailing window
    pub moving_average: f64,
    /// Exponential moving average folded over the whole series
    pub exponential_moving_average: f64,
    /// Volatility measure (standard deviation of the series)
    pub volatility: f64,
    /// Anomaly detection result; `None` when detection is disabled or nothing anomalous
    pub anomaly: Option<AnomalyDetection>,
    /// Forecast for the next period (linear extrapolation)
    pub forecast: Option<f64>,
    /// Confidence interval (lower, upper) around the forecast
    pub confidence_interval: Option<(f64, f64)>,
}

/// Direction of a metric's trend over the analyzed series.
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
pub enum TrendDirection {
    /// Regression slope above the flat threshold
    Increasing,
    /// Regression slope below the negative flat threshold
    Decreasing,
    /// Essentially flat slope with low volatility
    Stable,
    /// Essentially flat slope but high volatility
    Volatile,
    /// Not enough data to classify (fewer than two samples)
    Unknown,
}

/// Anomaly detection result
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct AnomalyDetection {
    /// Whether an anomaly was detected (always true when this struct is produced
    /// by the built-in detector, which returns `None` otherwise)
    pub is_anomaly: bool,
    /// Anomaly score (absolute z-score of the actual value)
    pub score: f64,
    /// Expected value (mean of the series)
    pub expected_value: f64,
    /// Actual value that triggered the detection
    pub actual_value: f64,
    /// Deviation from expected (actual - expected)
    pub deviation: f64,
    /// Anomaly type
    pub anomaly_type: AnomalyType,
}

/// Types of anomalies
#[derive(Debug, Clone, Serialize, Deserialize)]
pub enum AnomalyType {
    /// Value far above the expected mean
    Spike,
    /// Value far below the expected mean
    Drop,
    /// Gradual shift away from the baseline (not emitted by the detector in this module)
    Drift,
    /// Generic outlier (not emitted by the detector in this module)
    Outlier,
}

/// Comprehensive trend report
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct TrendReport {
    /// Report generation timestamp (UTC)
    pub timestamp: DateTime<Utc>,
    /// Time period covered by the underlying history
    pub period: TimePeriod,
    /// Individual metric trends, keyed by metric name
    pub metric_trends: HashMap<String, TrendAnalysis>,
    /// Overall quality trend (derived from the aggregate quality score)
    pub overall_quality_trend: TrendAnalysis,
    /// Key insights
    pub insights: Vec<TrendInsight>,
    /// Recommendations
    pub recommendations: Vec<TrendRecommendation>,
    /// Statistical summary
    pub statistics: TrendStatistics,
}

/// Time period for analysis
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct TimePeriod {
    /// Timestamp of the oldest sample in the period (UTC)
    pub start: DateTime<Utc>,
    /// Timestamp of the newest sample in the period (UTC)
    pub end: DateTime<Utc>,
    /// Number of data points within the period
    pub data_points: usize,
}

/// A noteworthy observation derived from one or more metric trends.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct TrendInsight {
    /// Insight type
    pub insight_type: InsightType,
    /// Human-readable description
    pub description: String,
    /// Confidence level (0.0 to 1.0; taken from the supporting trend's strength)
    pub confidence: f64,
    /// Names of the metrics that support this insight
    pub supporting_metrics: Vec<String>,
}

/// Types of insights
#[derive(Debug, Clone, Serialize, Deserialize)]
pub enum InsightType {
    /// Overall quality is trending up
    QualityImprovement,
    /// Overall quality is trending down
    QualityDegradation,
    /// Complexity metrics are trending up
    ComplexityIncrease,
    /// Complexity metrics are trending down
    ComplexityDecrease,
    /// Technical debt is accumulating
    TechnicalDebtGrowth,
    /// Technical debt is being paid down
    TechnicalDebtReduction,
    /// Performance metrics improving
    PerformanceImprovement,
    /// Performance metrics degrading
    PerformanceDegradation,
    /// Test coverage changed noticeably
    TestCoverageChange,
    /// Documentation metrics changed noticeably
    DocumentationChange,
}

/// An actionable recommendation derived from trends and insights.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct TrendRecommendation {
    /// Recommendation type
    pub recommendation_type: RecommendationType,
    /// Human-readable description of the action to take
    pub description: String,
    /// Priority level
    pub priority: RecommendationPriority,
    /// Expected impact, described in prose
    pub expected_impact: String,
    /// Rough effort estimate for carrying out the recommendation
    pub effort_estimate: EffortEstimate,
}

/// Types of recommendations
#[derive(Debug, Clone, Serialize, Deserialize)]
pub enum RecommendationType {
    /// Restructure existing code without changing behavior
    RefactorCode,
    /// Add tests to raise coverage
    IncreaseTestCoverage,
    /// Simplify overly complex functions
    ReduceComplexity,
    /// Improve code documentation
    ImproveDocumentation,
    /// Fix outstanding security findings
    AddressSecurityIssues,
    /// Improve runtime performance
    OptimizePerformance,
    /// Pay down accumulated technical debt
    ReduceTechnicalDebt,
    /// Perform a code review
    CodeReview,
}

/// Recommendation priority.
///
/// Variants are declared in ascending order of urgency, so the derived `Ord`
/// ranks `Low < Medium < High < Critical`.
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq, PartialOrd, Ord)]
pub enum RecommendationPriority {
    Low,
    Medium,
    High,
    Critical,
}

/// Rough implementation-effort estimate attached to a recommendation.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub enum EffortEstimate {
    /// Less than one day
    Low,
    /// One to five days
    Medium,
    /// One to two weeks
    High,
    /// More than two weeks
    VeryHigh,
}

/// Statistical summary of trends
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct TrendStatistics {
    /// Mean values for each metric, keyed by metric name
    pub means: HashMap<String, f64>,
    /// Sample standard deviations per metric
    pub standard_deviations: HashMap<String, f64>,
    /// Minimum observed values per metric
    pub minimums: HashMap<String, f64>,
    /// Maximum observed values per metric
    pub maximums: HashMap<String, f64>,
    /// Pairwise correlation matrix between metrics (currently always empty — see analyzer TODO)
    pub correlations: HashMap<String, HashMap<String, f64>>,
    /// Trend slope estimates per metric
    pub slopes: HashMap<String, f64>,
}

impl TrendAnalyzer {
    /// Create a new trend analyzer
    ///
    /// The internal history buffer keeps roughly ten windows' worth of samples
    /// so that longer-range statistics (volatility, forecasts) have data.
    pub fn new(config: TrendConfig) -> Self {
        Self {
            history: VecDeque::new(),
            // Keep ~10 windows of history for better analysis. Guard against a
            // zero window size — `window_size * 10` would otherwise make the
            // capacity 0, causing `add_metrics` to evict every sample so that
            // analysis could never run — and against overflow on pathological
            // configurations.
            max_history_size: config.window_size.max(1).saturating_mul(10),
            config,
        }
    }

    /// Record a new metrics snapshot, stamped with the current UTC time.
    ///
    /// Once the bounded history exceeds its configured capacity, the oldest
    /// snapshots are evicted from the front of the queue.
    pub fn add_metrics(&mut self, metrics: CodeMetrics, metadata: Option<HashMap<String, String>>) {
        self.history.push_back(TimestampedMetrics {
            metrics,
            timestamp: Utc::now(),
            metadata: metadata.unwrap_or_default(),
        });

        // Evict from the front (the oldest end) until the buffer fits again.
        while self.history.len() > self.max_history_size {
            self.history.pop_front();
        }

        debug!("Added metrics to trend history. Total history size: {}", self.history.len());
    }

    /// Analyze trends for all metrics
    ///
    /// Builds a [`TrendReport`] covering every tracked metric plus the derived
    /// overall-quality score.
    ///
    /// # Errors
    ///
    /// Returns an error when fewer than `min_data_points` samples have been
    /// recorded, or when any per-metric analysis fails.
    pub async fn analyze_trends(&self) -> Result<TrendReport, Box<dyn std::error::Error>> {
        // `min_data_points` may be configured as 0; require at least one sample
        // so the period bounds (front/back unwraps) below are always valid.
        if self.history.len() < self.config.min_data_points.max(1) {
            return Err("Insufficient data points for trend analysis".into());
        }

        debug!("Analyzing trends for {} data points", self.history.len());

        // Analyze each individually tracked metric.
        let metric_names = [
            "cyclomatic_complexity",
            "cognitive_complexity",
            "lines_of_code",
            "test_coverage",
            "duplication_percentage",
            "technical_debt_ratio",
            "maintainability_index",
        ];
        let mut metric_trends = HashMap::new();
        for &name in metric_names.iter() {
            metric_trends.insert(name.to_string(), self.analyze_metric_trend(name).await?);
        }

        // Analyze overall quality trend.
        let overall_quality_trend = self.analyze_overall_quality_trend().await?;
        // Also expose the overall-quality trend under its metric name: insight
        // generation looks up "overall_quality" in this map, but previously the
        // key was never inserted, so quality-improvement insights could never
        // fire.
        metric_trends.insert("overall_quality".to_string(), overall_quality_trend.clone());

        // Generate insights and recommendations from the per-metric trends.
        let insights = self.generate_insights(&metric_trends).await?;
        let recommendations = self.generate_recommendations(&metric_trends, &insights).await?;

        // Calculate statistics.
        let statistics = self.calculate_statistics().await?;

        // Non-empty history is guaranteed by the guard above.
        let period = TimePeriod {
            start: self.history.front().unwrap().timestamp,
            end: self.history.back().unwrap().timestamp,
            data_points: self.history.len(),
        };

        Ok(TrendReport {
            timestamp: Utc::now(),
            period,
            metric_trends,
            overall_quality_trend,
            insights,
            recommendations,
            statistics,
        })
    }

    /// Analyze trend for a specific metric
    async fn analyze_metric_trend(&self, metric_name: &str) -> Result<TrendAnalysis, Box<dyn std::error::Error>> {
        let values = self.extract_metric_values(metric_name);
        
        if values.is_empty() {
            return Err(format!("No data for metric: {}", metric_name).into());
        }

        let current_value = *values.last().unwrap();
        let previous_value = if values.len() > 1 { Some(values[values.len() - 2]) } else { None };

        // Calculate trend direction and strength
        let (trend, trend_strength) = self.calculate_trend_direction(&values);

        // Calculate rate of change
        let rate_of_change = if let Some(prev) = previous_value {
            if prev != 0.0 {
                ((current_value - prev) / prev) * 100.0
            } else {
                0.0
            }
        } else {
            0.0
        };

        // Calculate moving averages
        let moving_average = self.calculate_moving_average(&values);
        let exponential_moving_average = self.calculate_exponential_moving_average(&values);

        // Calculate volatility
        let volatility = self.calculate_volatility(&values);

        // Detect anomalies
        let anomaly = if self.config.enable_anomaly_detection {
            self.detect_anomaly(&values, current_value)
        } else {
            None
        };

        // Generate forecast
        let (forecast, confidence_interval) = self.generate_forecast(&values);

        Ok(TrendAnalysis {
            metric_name: metric_name.to_string(),
            current_value,
            previous_value,
            trend,
            trend_strength,
            rate_of_change,
            moving_average,
            exponential_moving_average,
            volatility,
            anomaly,
            forecast,
            confidence_interval,
        })
    }

    /// Pull the time-ordered series of a single metric out of the history.
    ///
    /// Unrecognized metric names yield a series of zeros rather than an error.
    fn extract_metric_values(&self, metric_name: &str) -> Vec<f64> {
        let mut series = Vec::with_capacity(self.history.len());
        for entry in &self.history {
            let m = &entry.metrics;
            series.push(match metric_name {
                "cyclomatic_complexity" => m.complexity.cyclomatic_complexity,
                "cognitive_complexity" => m.complexity.cognitive_complexity,
                "lines_of_code" => m.lines_of_code.total_lines as f64,
                "test_coverage" => m.quality.test_coverage,
                "duplication_percentage" => m.quality.duplication_percentage,
                "technical_debt_ratio" => m.quality.technical_debt_ratio,
                "maintainability_index" => m.maintainability.maintainability_index,
                _ => 0.0,
            });
        }
        series
    }

    /// Fit a least-squares line through the series and classify its direction.
    ///
    /// Returns the direction together with a strength in [0.0, 1.0] — the
    /// absolute regression slope, capped at 1.0. Slopes within ±0.1 are
    /// considered flat; a flat series whose standard deviation exceeds 0.2 is
    /// reported as volatile instead of stable.
    fn calculate_trend_direction(&self, values: &[f64]) -> (TrendDirection, f64) {
        if values.len() < 2 {
            return (TrendDirection::Unknown, 0.0);
        }

        // Least-squares slope of value against sample index 0..n-1.
        // Mean of the indices 0..n-1 is (n-1)/2.
        let n = values.len() as f64;
        let x_mean = (values.len() - 1) as f64 / 2.0;
        let y_mean = values.iter().sum::<f64>() / n;

        let mut numerator = 0.0;
        let mut denominator = 0.0;
        for (i, &y) in values.iter().enumerate() {
            let dx = i as f64 - x_mean;
            numerator += dx * (y - y_mean);
            denominator += dx * dx;
        }

        if denominator == 0.0 {
            return (TrendDirection::Stable, 0.0);
        }

        let slope = numerator / denominator;
        let strength = slope.abs().min(1.0);

        let direction = if slope > 0.1 {
            TrendDirection::Increasing
        } else if slope < -0.1 {
            TrendDirection::Decreasing
        } else if self.calculate_volatility(values) > 0.2 {
            TrendDirection::Volatile
        } else {
            TrendDirection::Stable
        };

        (direction, strength)
    }

    /// Simple moving average over the trailing window.
    ///
    /// Averages at most `config.window_size` of the most recent values.
    /// Returns 0.0 when the slice is empty or the window size is zero — the
    /// previous implementation divided 0.0 by 0, yielding NaN in those cases.
    fn calculate_moving_average(&self, values: &[f64]) -> f64 {
        let window_size = self.config.window_size.min(values.len());
        if window_size == 0 {
            return 0.0;
        }
        let window = &values[values.len() - window_size..];
        window.iter().sum::<f64>() / window_size as f64
    }

    /// Exponentially weighted average of the series.
    ///
    /// Seeded from the first sample and folded forward with the configured
    /// smoothing factor `alpha`; returns 0.0 for an empty series.
    fn calculate_exponential_moving_average(&self, values: &[f64]) -> f64 {
        let alpha = self.config.smoothing_factor;
        match values.split_first() {
            None => 0.0,
            Some((&first, rest)) => rest
                .iter()
                .fold(first, |ema, &v| alpha * v + (1.0 - alpha) * ema),
        }
    }

    /// Volatility of the series, measured as its standard deviation.
    ///
    /// Series with fewer than two points have no spread and report 0.0.
    fn calculate_volatility(&self, values: &[f64]) -> f64 {
        match values.len() {
            0 | 1 => 0.0,
            _ => Data::new(values.to_vec()).std_dev().unwrap_or(0.0),
        }
    }

    /// Flag `current_value` as anomalous when its z-score against the series
    /// mean exceeds the configured threshold.
    ///
    /// Returns `None` when there is too little data, the series has no spread,
    /// or the value is within bounds.
    fn detect_anomaly(&self, values: &[f64], current_value: f64) -> Option<AnomalyDetection> {
        if values.len() < self.config.min_data_points {
            return None;
        }

        let data = Data::new(values.to_vec());
        let mean = data.mean().unwrap_or(0.0);
        let std_dev = data.std_dev().unwrap_or(0.0);
        if std_dev == 0.0 {
            // A flat series has no meaningful z-score.
            return None;
        }

        let z_score = (current_value - mean) / std_dev;
        if z_score.abs() <= self.config.anomaly_threshold {
            return None;
        }

        Some(AnomalyDetection {
            is_anomaly: true,
            score: z_score.abs(),
            expected_value: mean,
            actual_value: current_value,
            deviation: current_value - mean,
            // Above-mean outliers are spikes; below-mean outliers are drops.
            anomaly_type: if z_score > self.config.anomaly_threshold {
                AnomalyType::Spike
            } else {
                AnomalyType::Drop
            },
        })
    }

    /// Forecast the next sample via linear extrapolation.
    ///
    /// Returns `(forecast, (lower, upper))`, where the interval is a
    /// simplified 95% band of ±1.96 standard deviations around the point
    /// forecast, or `(None, None)` when there is too little data or the fit
    /// is degenerate.
    fn generate_forecast(&self, values: &[f64]) -> (Option<f64>, Option<(f64, f64)>) {
        if values.len() < self.config.min_data_points {
            return (None, None);
        }

        // Least-squares fit of value against sample index 0..n-1; the mean of
        // those indices is (n-1)/2.
        let n = values.len() as f64;
        let x_mean = (values.len() - 1) as f64 / 2.0;
        let y_mean = values.iter().sum::<f64>() / n;

        let mut numerator = 0.0;
        let mut denominator = 0.0;
        for (i, &y) in values.iter().enumerate() {
            let dx = i as f64 - x_mean;
            numerator += dx * (y - y_mean);
            denominator += dx * dx;
        }

        if denominator == 0.0 {
            return (None, None);
        }

        let slope = numerator / denominator;
        let intercept = y_mean - slope * x_mean;

        // The next period corresponds to index n.
        let forecast = slope * n + intercept;

        // Simplified confidence interval from the series' own spread.
        let std_dev = self.calculate_volatility(values);
        let band = 1.96 * std_dev;

        (Some(forecast), Some((forecast - band, forecast + band)))
    }

    /// Derive the trend of the aggregate quality score across the history.
    async fn analyze_overall_quality_trend(&self) -> Result<TrendAnalysis, Box<dyn std::error::Error>> {
        let mut quality_scores = Vec::with_capacity(self.history.len());
        for entry in &self.history {
            quality_scores.push(entry.metrics.calculate_quality_score());
        }
        self.analyze_values_trend("overall_quality", &quality_scores).await
    }

    /// Run the full trend pipeline over a raw value series.
    ///
    /// Computes trend direction and strength, rate of change, moving averages,
    /// volatility, optional anomaly detection and a linear forecast for the
    /// named metric.
    ///
    /// # Errors
    ///
    /// Returns an error when `values` is empty.
    async fn analyze_values_trend(&self, metric_name: &str, values: &[f64]) -> Result<TrendAnalysis, Box<dyn std::error::Error>> {
        let current_value = match values.last() {
            Some(&v) => v,
            None => return Err(format!("No data for metric: {}", metric_name).into()),
        };
        // Second most recent sample, when at least two exist.
        let previous_value = values.len().checked_sub(2).map(|i| values[i]);

        let (trend, trend_strength) = self.calculate_trend_direction(values);

        // Percentage change versus the previous sample; zero when there is no
        // previous sample or it was exactly zero (avoids division by zero).
        let rate_of_change = match previous_value {
            Some(prev) if prev != 0.0 => ((current_value - prev) / prev) * 100.0,
            _ => 0.0,
        };

        let moving_average = self.calculate_moving_average(values);
        let exponential_moving_average = self.calculate_exponential_moving_average(values);
        let volatility = self.calculate_volatility(values);

        let anomaly = self
            .config
            .enable_anomaly_detection
            .then(|| self.detect_anomaly(values, current_value))
            .flatten();

        let (forecast, confidence_interval) = self.generate_forecast(values);

        Ok(TrendAnalysis {
            metric_name: metric_name.to_string(),
            current_value,
            previous_value,
            trend,
            trend_strength,
            rate_of_change,
            moving_average,
            exponential_moving_average,
            volatility,
            anomaly,
            forecast,
            confidence_interval,
        })
    }

    /// Generate insights from trend analysis
    async fn generate_insights(&self, metric_trends: &HashMap<String, TrendAnalysis>) -> Result<Vec<TrendInsight>, Box<dyn std::error::Error>> {
        let mut insights = Vec::new();

        // Check for quality improvements
        if let Some(quality_trend) = metric_trends.get("overall_quality") {
            if quality_trend.trend == TrendDirection::Increasing && quality_trend.trend_strength > 0.3 {
                insights.push(TrendInsight {
                    insight_type: InsightType::QualityImprovement,
                    description: "Overall code quality is improving".to_string(),
                    confidence: quality_trend.trend_strength,
                    supporting_metrics: vec!["overall_quality".to_string()],
                });
            }
        }

        // Check for complexity increases
        if let Some(complexity_trend) = metric_trends.get("cyclomatic_complexity") {
            if complexity_trend.trend == TrendDirection::Increasing && complexity_trend.trend_strength > 0.4 {
                insights.push(TrendInsight {
                    insight_type: InsightType::ComplexityIncrease,
                    description: "Code complexity is increasing".to_string(),
                    confidence: complexity_trend.trend_strength,
                    supporting_metrics: vec!["cyclomatic_complexity".to_string()],
                });
            }
        }

        // Check for technical debt growth
        if let Some(debt_trend) = metric_trends.get("technical_debt_ratio") {
            if debt_trend.trend == TrendDirection::Increasing && debt_trend.trend_strength > 0.3 {
                insights.push(TrendInsight {
                    insight_type: InsightType::TechnicalDebtGrowth,
                    description: "Technical debt is accumulating".to_string(),
                    confidence: debt_trend.trend_strength,
                    supporting_metrics: vec!["technical_debt_ratio".to_string()],
                });
            }
        }

        Ok(insights)
    }

    /// Generate recommendations based on trends and insights
    ///
    /// Maps actionable insight types to concrete recommendations, then adds
    /// metric-threshold recommendations (test coverage below 80%).
    async fn generate_recommendations(
        &self,
        metric_trends: &HashMap<String, TrendAnalysis>,
        insights: &[TrendInsight],
    ) -> Result<Vec<TrendRecommendation>, Box<dyn std::error::Error>> {
        let mut recommendations = Vec::new();

        // Insight-driven recommendations; insight types without a mapped
        // action contribute nothing.
        for insight in insights {
            let action = match insight.insight_type {
                InsightType::ComplexityIncrease => Some(TrendRecommendation {
                    recommendation_type: RecommendationType::ReduceComplexity,
                    description: "Consider refactoring complex functions to reduce cyclomatic complexity".to_string(),
                    priority: RecommendationPriority::High,
                    expected_impact: "Improved maintainability and reduced bug risk".to_string(),
                    effort_estimate: EffortEstimate::Medium,
                }),
                InsightType::TechnicalDebtGrowth => Some(TrendRecommendation {
                    recommendation_type: RecommendationType::ReduceTechnicalDebt,
                    description: "Address accumulating technical debt through code cleanup".to_string(),
                    priority: RecommendationPriority::Medium,
                    expected_impact: "Better code maintainability and development velocity".to_string(),
                    effort_estimate: EffortEstimate::High,
                }),
                _ => None,
            };
            recommendations.extend(action);
        }

        // Metric-threshold recommendations.
        if let Some(coverage_trend) = metric_trends.get("test_coverage") {
            if coverage_trend.current_value < 80.0 {
                recommendations.push(TrendRecommendation {
                    recommendation_type: RecommendationType::IncreaseTestCoverage,
                    description: format!("Increase test coverage from {:.1}% to at least 80%", coverage_trend.current_value),
                    priority: RecommendationPriority::High,
                    expected_impact: "Reduced bug risk and improved code reliability".to_string(),
                    effort_estimate: EffortEstimate::Medium,
                });
            }
        }

        Ok(recommendations)
    }

    /// Calculate statistical summary
    ///
    /// Computes mean, sample standard deviation, min, max and the
    /// least-squares trend slope for every tracked metric series.
    async fn calculate_statistics(&self) -> Result<TrendStatistics, Box<dyn std::error::Error>> {
        // Least-squares slope of `values` against their indices 0..n-1;
        // 0.0 for degenerate (constant-x) series.
        fn linear_slope(values: &[f64]) -> f64 {
            let n = values.len() as f64;
            let x_mean = values.len().saturating_sub(1) as f64 / 2.0;
            let y_mean = values.iter().sum::<f64>() / n;
            let mut num = 0.0;
            let mut den = 0.0;
            for (i, &y) in values.iter().enumerate() {
                let dx = i as f64 - x_mean;
                num += dx * (y - y_mean);
                den += dx * dx;
            }
            if den == 0.0 { 0.0 } else { num / den }
        }

        let metric_names = [
            "cyclomatic_complexity", "cognitive_complexity", "lines_of_code",
            "test_coverage", "duplication_percentage", "technical_debt_ratio",
            "maintainability_index",
        ];

        let mut means = HashMap::new();
        let mut standard_deviations = HashMap::new();
        let mut minimums = HashMap::new();
        let mut maximums = HashMap::new();
        let mut slopes = HashMap::new();

        for &metric_name in metric_names.iter() {
            let values = self.extract_metric_values(metric_name);
            if !values.is_empty() {
                let data = Data::new(values.clone());
                means.insert(metric_name.to_string(), data.mean().unwrap_or(0.0));
                standard_deviations.insert(metric_name.to_string(), data.std_dev().unwrap_or(0.0));
                minimums.insert(metric_name.to_string(), data.min());
                maximums.insert(metric_name.to_string(), data.max());

                // Store the actual signed regression slope. The previous code
                // stored the second element of `calculate_trend_direction`,
                // which is the *trend strength* (|slope| clamped to 1.0), not
                // the slope — losing both the sign and any magnitude above 1.
                slopes.insert(metric_name.to_string(), linear_slope(&values));
            }
        }

        // Correlation matrix is not implemented yet; left empty.
        let correlations = HashMap::new(); // TODO: Implement correlation calculation

        Ok(TrendStatistics {
            means,
            standard_deviations,
            minimums,
            maximums,
            correlations,
            slopes,
        })
    }
}
