use crate::{
    metrics::*, complexity::*, quality::*, trends::*, reports::*,
    DateTime, Utc,
};
use codegraph_core::{SyntaxNode, Result as CoreResult, CodeGraphError};
use std::collections::HashMap;
use std::path::Path;
use tracing::{debug, info, warn, error};
use tokio::fs;

/// Advanced analytics analyzer
/// Advanced analytics analyzer.
///
/// Orchestrates the individual analysis passes (complexity, quality, trends)
/// and report generation, and memoizes per-file metrics in an in-memory cache.
pub struct AdvancedAnalyzer {
    /// Metrics configuration (copied out of `AnalysisConfig::metrics`);
    /// its `enable_*` flags gate the individual analysis passes.
    metrics_config: MetricsConfig,
    /// Complexity analyzer (cyclomatic/cognitive/Halstead).
    complexity_analyzer: ComplexityAnalyzer,
    /// Quality analyzer (smells, duplication, coverage, security issues).
    quality_analyzer: QualityAnalyzer,
    /// Trend analyzer; accumulates historical metrics snapshots.
    trend_analyzer: TrendAnalyzer,
    /// Report generator used by `generate_report`.
    report_generator: ReportGenerator,
    /// Analysis cache, keyed by "path:content-hash" strings.
    /// NOTE(review): unbounded — `AnalysisConfig::cache_size_limit` is never
    /// enforced; confirm whether eviction is intended.
    cache: HashMap<String, CodeMetrics>,
}

/// Analysis configuration
/// Analysis configuration.
///
/// Bundles per-subsystem configs plus caching knobs for `AdvancedAnalyzer::new`.
#[derive(Debug, Clone)]
pub struct AnalysisConfig {
    /// Metrics configuration (gates which analysis passes run).
    pub metrics: MetricsConfig,
    /// Complexity analysis configuration.
    pub complexity: ComplexityConfig,
    /// Quality analysis configuration.
    pub quality: QualityConfig,
    /// Trend analysis configuration.
    pub trends: TrendConfig,
    /// Report configuration.
    pub reports: ReportConfiguration,
    /// Enable caching.
    /// NOTE(review): currently ignored by `AdvancedAnalyzer::new` — the cache
    /// is always active; confirm intended behavior.
    pub enable_caching: bool,
    /// Cache size limit (number of entries).
    /// NOTE(review): currently not enforced anywhere in this file.
    pub cache_size_limit: usize,
}

impl Default for AnalysisConfig {
    fn default() -> Self {
        Self {
            metrics: MetricsConfig::default(),
            complexity: ComplexityConfig::default(),
            quality: QualityConfig::default(),
            trends: TrendConfig::default(),
            reports: ReportConfiguration::default(),
            enable_caching: true,
            cache_size_limit: 1000,
        }
    }
}

/// Analysis result
#[derive(Debug, Clone)]
pub struct AnalysisResult {
    /// Computed metrics
    pub metrics: CodeMetrics,
    /// Quality issues found
    pub quality_issues: Vec<QualityIssue>,
    /// Analysis metadata
    pub metadata: AnalysisMetadata,
}

/// Analysis metadata
/// Analysis metadata: bookkeeping about one analysis run.
#[derive(Debug, Clone)]
pub struct AnalysisMetadata {
    /// Analysis start time (UTC).
    pub start_time: DateTime<Utc>,
    /// Analysis end time (UTC).
    pub end_time: DateTime<Utc>,
    /// Wall-clock duration in milliseconds (`end_time - start_time`).
    pub duration_ms: u64,
    /// Number of files analyzed (always 1 for a single-file run).
    pub files_analyzed: usize,
    /// Total source lines processed (as counted by `str::lines`).
    pub lines_processed: usize,
    /// Languages detected in the analyzed files.
    pub languages: Vec<String>,
    /// Errors encountered; analysis continues past per-pass failures,
    /// recording the message here instead of aborting.
    pub errors: Vec<String>,
    /// Warnings generated.
    pub warnings: Vec<String>,
}

impl AdvancedAnalyzer {
    /// Create a new advanced analyzer from `config`.
    ///
    /// NOTE(review): `config.enable_caching` and `config.cache_size_limit`
    /// are dropped here — the cache is always on and unbounded. Confirm
    /// whether they should be stored and enforced.
    pub fn new(config: AnalysisConfig) -> Self {
        Self {
            metrics_config: config.metrics.clone(),
            complexity_analyzer: ComplexityAnalyzer::new(config.complexity),
            quality_analyzer: QualityAnalyzer::new(config.quality),
            trend_analyzer: TrendAnalyzer::new(config.trends),
            report_generator: ReportGenerator::new(config.reports),
            cache: HashMap::new(),
        }
    }

    /// Analyze a single file.
    ///
    /// Runs the enabled passes (LOC, complexity, quality, maintainability,
    /// performance) over `content`/`syntax_tree`. Per-pass failures are
    /// recorded in `metadata.errors` rather than aborting the whole run;
    /// only LOC/maintainability errors propagate as `Err`.
    ///
    /// Results are memoized by a "path:content-hash" key; a cache hit
    /// returns the stored metrics with empty `quality_issues`.
    pub async fn analyze_file(
        &mut self,
        file_path: &Path,
        content: &str,
        syntax_tree: &dyn SyntaxNode,
        language: &str,
    ) -> CoreResult<AnalysisResult> {
        let start_time = Utc::now();
        info!("Starting analysis for file: {}", file_path.display());

        let mut metadata = AnalysisMetadata {
            start_time,
            end_time: start_time, // patched up once the run finishes
            duration_ms: 0,
            files_analyzed: 1,
            lines_processed: content.lines().count(),
            languages: vec![language.to_string()],
            errors: Vec::new(),
            warnings: Vec::new(),
        };

        // Check cache first: key is path + content hash so edits invalidate.
        let cache_key = format!("{}:{}", file_path.display(), self.calculate_content_hash(content));
        if let Some(cached_metrics) = self.cache.get(&cache_key) {
            debug!("Using cached metrics for file: {}", file_path.display());
            let end_time = Utc::now();
            metadata.end_time = end_time;
            metadata.duration_ms = (end_time - start_time).num_milliseconds() as u64;

            return Ok(AnalysisResult {
                metrics: cached_metrics.clone(),
                quality_issues: Vec::new(), // TODO: Cache quality issues too
                metadata,
            });
        }

        let mut metrics = CodeMetrics::new();

        // Analyze lines of code.
        // NOTE(review): gated on `enable_complexity`, same flag as the
        // complexity pass below — looks like a copy/paste; confirm whether
        // MetricsConfig has a dedicated LOC flag that should be used here.
        if self.metrics_config.enable_complexity {
            metrics.lines_of_code = self.analyze_lines_of_code(content, file_path.to_string_lossy().as_ref()).await?;
        }

        // Analyze complexity; failure is recorded, not propagated.
        if self.metrics_config.enable_complexity {
            match self.complexity_analyzer.analyze_complexity(syntax_tree, language).await {
                Ok(complexity_metrics) => {
                    metrics.complexity = complexity_metrics;
                }
                Err(e) => {
                    let error_msg = format!("Complexity analysis failed: {}", e);
                    error!("{}", error_msg);
                    metadata.errors.push(error_msg);
                }
            }
        }

        // Analyze quality; failure is recorded, not propagated.
        let mut quality_issues = Vec::new();
        if self.metrics_config.enable_quality {
            match self.quality_analyzer.analyze_quality(syntax_tree, file_path.to_string_lossy().as_ref(), content).await {
                Ok((quality_metrics, issues)) => {
                    metrics.quality = quality_metrics;
                    quality_issues = issues;
                }
                Err(e) => {
                    let error_msg = format!("Quality analysis failed: {}", e);
                    error!("{}", error_msg);
                    metadata.errors.push(error_msg);
                }
            }
        }

        // Analyze maintainability (derived from the metrics computed above).
        if self.metrics_config.enable_maintainability {
            metrics.maintainability = self.analyze_maintainability(&metrics).await?;
        }

        // Record performance metrics for this analysis run.
        if self.metrics_config.enable_performance {
            let end_time = Utc::now();
            let duration = (end_time - start_time).num_milliseconds() as u64;

            metrics.performance = PerformanceMetrics {
                analysis_time_ms: duration,
                memory_usage_bytes: self.estimate_memory_usage(),
                cpu_usage_percent: 0.0, // TODO: Implement actual CPU monitoring
                io_operations: 1,       // File read
                // Reaching this point means the cache lookup above missed
                // (a hit returns early), so the hit ratio for this run is 0.
                // The original `contains_key` check here could never be true.
                cache_hit_ratio: 0.0,
            };
        }

        // Update metadata with final timing.
        let end_time = Utc::now();
        metadata.end_time = end_time;
        metadata.duration_ms = (end_time - start_time).num_milliseconds() as u64;

        // Cache the results (metrics only; quality issues are not cached).
        self.cache.insert(cache_key, metrics.clone());

        info!("Analysis completed for file: {} in {}ms", file_path.display(), metadata.duration_ms);

        Ok(AnalysisResult {
            metrics,
            quality_issues,
            metadata,
        })
    }

    /// Analyze multiple files under `project_path`.
    ///
    /// Files failing analysis are logged and skipped; the returned vector
    /// contains one result per successfully analyzed file.
    pub async fn analyze_project(
        &mut self,
        project_path: &Path,
        file_patterns: &[String],
    ) -> CoreResult<Vec<AnalysisResult>> {
        info!("Starting project analysis for: {}", project_path.display());

        let files = self.discover_files(project_path, file_patterns).await?;
        let mut results = Vec::new();

        for file_path in files {
            match self.analyze_file_from_path(&file_path).await {
                Ok(result) => results.push(result),
                Err(e) => {
                    warn!("Failed to analyze file {}: {}", file_path.display(), e);
                }
            }
        }

        info!("Project analysis completed. Analyzed {} files", results.len());
        Ok(results)
    }

    /// Generate a comprehensive report from per-file `analysis_results`.
    ///
    /// Aggregates metrics, collects all quality issues, and attaches a trend
    /// report when the trend analyzer has enough historical data.
    ///
    /// # Errors
    /// Returns an error if aggregation, trend analysis, or report generation
    /// fails.
    pub async fn generate_report(
        &mut self,
        analysis_results: &[AnalysisResult],
        project_name: &str,
    ) -> CoreResult<AnalyticsReport> {
        info!("Generating comprehensive analytics report");

        // Aggregate metrics from all files.
        let aggregated_metrics = self.aggregate_metrics(analysis_results).await?;

        // Collect all quality issues across files.
        let all_quality_issues: Vec<QualityIssue> = analysis_results
            .iter()
            .flat_map(|result| result.quality_issues.clone())
            .collect();

        // Generate trend report only if we have historical data.
        let trend_report = if self.trend_analyzer.has_sufficient_data() {
            Some(self.trend_analyzer.analyze_trends().await.map_err(|e| {
                CodeGraphError::parse_error(format!("Trend analysis failed: {}", e))
            })?)
        } else {
            None
        };

        // Describe what this report covers.
        let scope = AnalysisScope {
            project_name: project_name.to_string(),
            files_analyzed: analysis_results.len(),
            total_lines: aggregated_metrics.lines_of_code.total_lines,
            languages: self.extract_languages(analysis_results),
            time_period: trend_report.as_ref().map(|tr| {
                format!("{} to {}",
                    tr.period.start.format("%Y-%m-%d"),
                    tr.period.end.format("%Y-%m-%d"))
            }),
        };

        // Generate the report.
        let report = self.report_generator.generate_report(
            aggregated_metrics,
            all_quality_issues,
            trend_report,
            scope,
        ).await.map_err(|e| {
            CodeGraphError::parse_error(format!("Report generation failed: {}", e))
        })?;

        info!("Analytics report generated successfully");
        Ok(report)
    }

    /// Feed a metrics snapshot (plus optional tags) into trend analysis.
    pub fn add_to_trends(&mut self, metrics: &CodeMetrics, metadata: Option<HashMap<String, String>>) {
        self.trend_analyzer.add_metrics(metrics.clone(), metadata);
    }

    /// Classify each line of `content` as code, comment, blank, or mixed
    /// and return per-file totals keyed by `file_path`.
    ///
    /// NOTE(review): the comment heuristics below are crude (see
    /// `is_comment_line` / `has_code_content`); `mixed` in practice stays 0
    /// because trimmed lines starting with a comment marker never have code
    /// before it. Confirm intended semantics before relying on `mixed`.
    async fn analyze_lines_of_code(&self, content: &str, file_path: &str) -> CoreResult<LinesOfCodeMetrics> {
        let lines: Vec<&str> = content.lines().collect();
        let total_lines = lines.len();

        let mut code_lines = 0;
        let mut comment_lines = 0;
        let mut blank_lines = 0;
        let mut mixed_lines = 0;

        for line in &lines {
            let trimmed = line.trim();

            if trimmed.is_empty() {
                blank_lines += 1;
            } else if self.is_comment_line(trimmed) {
                if self.has_code_content(trimmed) {
                    mixed_lines += 1;
                } else {
                    comment_lines += 1;
                }
            } else {
                code_lines += 1;
            }
        }

        let file_metrics = FileLineMetrics {
            total: total_lines,
            code: code_lines,
            comments: comment_lines,
            blank: blank_lines,
            mixed: mixed_lines,
        };

        let mut per_file = HashMap::new();
        per_file.insert(file_path.to_string(), file_metrics);

        Ok(LinesOfCodeMetrics {
            total_lines,
            code_lines,
            comment_lines,
            blank_lines,
            mixed_lines,
            per_file,
        })
    }

    /// Check whether a (pre-trimmed) line starts with a common comment
    /// marker across C-family, Python/shell, and Python docstring styles.
    fn is_comment_line(&self, line: &str) -> bool {
        line.starts_with("//") || line.starts_with('#') ||
        line.starts_with("/*") || line.starts_with('*') ||
        line.starts_with("\"\"\"") || line.starts_with("'''")
    }

    /// Check whether a line has code content before a comment marker
    /// (used to classify "mixed" lines).
    fn has_code_content(&self, line: &str) -> bool {
        // Simple heuristic: non-empty content before the first comment marker.
        // (Does not handle markers inside string literals.)
        if let Some(comment_pos) = line.find("//") {
            !line[..comment_pos].trim().is_empty()
        } else if let Some(comment_pos) = line.find('#') {
            !line[..comment_pos].trim().is_empty()
        } else {
            false
        }
    }

    /// Derive maintainability metrics from already-computed complexity/LOC.
    async fn analyze_maintainability(&self, metrics: &CodeMetrics) -> CoreResult<MaintainabilityMetrics> {
        let halstead_volume = metrics.complexity.halstead_complexity.volume;
        let cyclomatic_complexity = metrics.complexity.cyclomatic_complexity;
        let lines_of_code = metrics.lines_of_code.code_lines as f64;

        // Classic Maintainability Index formula (simplified), clamped to
        // [0, 100]; degenerate inputs (no code / zero volume) score perfect.
        let maintainability_index = if lines_of_code > 0.0 && halstead_volume > 0.0 {
            let mi = 171.0 - 5.2 * halstead_volume.ln() - 0.23 * cyclomatic_complexity - 16.2 * lines_of_code.ln();
            mi.clamp(0.0, 100.0)
        } else {
            100.0
        };

        // Coupling proxy: cyclomatic complexity scaled to [0, 1].
        let coupling = (cyclomatic_complexity / 10.0).min(1.0);

        // Cohesion proxy: inverse of coupling, as a percentage.
        let cohesion = (1.0 - coupling) * 100.0;

        Ok(MaintainabilityMetrics {
            maintainability_index,
            coupling,
            cohesion,
            inheritance_depth: 0.0, // TODO: Implement inheritance analysis
            responsibility_assignment: 100.0 - coupling * 50.0,
            change_impact: coupling * 100.0,
        })
    }

    /// Discover files directly under `project_path` whose extension appears
    /// in one of `patterns` (substring match).
    ///
    /// NOTE(review): non-recursive and not real glob matching — a pattern
    /// like "rs" also matches "rst" files. Fine as a placeholder; replace
    /// with proper glob handling for production use.
    async fn discover_files(&self, project_path: &Path, patterns: &[String]) -> CoreResult<Vec<std::path::PathBuf>> {
        let mut files = Vec::new();

        let mut entries = fs::read_dir(project_path).await.map_err(|e| {
            CodeGraphError::storage_error(format!("Failed to read directory: {}", e))
        })?;

        while let Some(entry) = entries.next_entry().await.map_err(|e| {
            CodeGraphError::storage_error(format!("Failed to read directory entry: {}", e))
        })? {
            let path = entry.path();

            if path.is_file() {
                if let Some(extension) = path.extension() {
                    let ext_str = extension.to_string_lossy();
                    // Borrow the Cow directly; no need to allocate a String.
                    if patterns.iter().any(|pattern| pattern.contains(ext_str.as_ref())) {
                        files.push(path);
                    }
                }
            }
        }

        Ok(files)
    }

    /// Read, language-detect, and analyze a single file from disk.
    async fn analyze_file_from_path(&mut self, file_path: &Path) -> CoreResult<AnalysisResult> {
        let content = fs::read_to_string(file_path).await.map_err(|e| {
            CodeGraphError::storage_error(format!("Failed to read file: {}", e))
        })?;

        // Detect language from file extension.
        let language = self.detect_language(file_path);

        // For this example, we create a mock syntax tree.
        // In a real implementation, the content would be parsed with Tree-sitter.
        let mock_tree = MockSyntaxNode::new("program", content.clone());

        self.analyze_file(file_path, &content, &mock_tree, &language).await
    }

    /// Detect programming language from the file extension; "unknown" when
    /// the extension is absent or unrecognized.
    fn detect_language(&self, file_path: &Path) -> String {
        if let Some(extension) = file_path.extension() {
            match extension.to_string_lossy().as_ref() {
                "rs" => "rust".to_string(),
                "py" => "python".to_string(),
                "js" => "javascript".to_string(),
                "ts" => "typescript".to_string(),
                "java" => "java".to_string(),
                "cpp" | "cc" | "cxx" => "cpp".to_string(),
                "c" => "c".to_string(),
                "go" => "go".to_string(),
                _ => "unknown".to_string(),
            }
        } else {
            "unknown".to_string()
        }
    }

    /// Hash file content for cache keying.
    ///
    /// Uses `DefaultHasher` (SipHash); stable within a process run, which is
    /// all the in-memory cache needs — do not persist these hashes.
    fn calculate_content_hash(&self, content: &str) -> u64 {
        use std::collections::hash_map::DefaultHasher;
        use std::hash::{Hash, Hasher};

        let mut hasher = DefaultHasher::new();
        content.hash(&mut hasher);
        hasher.finish()
    }

    /// Estimate memory usage: a rough 1 KiB-per-cache-entry guess.
    fn estimate_memory_usage(&self) -> u64 {
        (self.cache.len() * 1024) as u64
    }

    /// Aggregate per-file metrics into project-level metrics.
    ///
    /// LOC counts and issue counts are summed; complexity, duplication, and
    /// coverage are unweighted per-file means. Returns empty metrics for an
    /// empty slice (also guarding the divisions below against zero).
    async fn aggregate_metrics(&self, results: &[AnalysisResult]) -> CoreResult<CodeMetrics> {
        if results.is_empty() {
            return Ok(CodeMetrics::new());
        }

        let mut aggregated = CodeMetrics::new();

        // Sum lines-of-code counts and merge per-file maps.
        for result in results {
            aggregated.lines_of_code.total_lines += result.metrics.lines_of_code.total_lines;
            aggregated.lines_of_code.code_lines += result.metrics.lines_of_code.code_lines;
            aggregated.lines_of_code.comment_lines += result.metrics.lines_of_code.comment_lines;
            aggregated.lines_of_code.blank_lines += result.metrics.lines_of_code.blank_lines;
            aggregated.lines_of_code.mixed_lines += result.metrics.lines_of_code.mixed_lines;

            for (file, metrics) in &result.metrics.lines_of_code.per_file {
                aggregated.lines_of_code.per_file.insert(file.clone(), metrics.clone());
            }
        }

        // Unweighted averages across files (file_count > 0 by the guard above).
        let file_count = results.len() as f64;
        aggregated.complexity.cyclomatic_complexity = results.iter()
            .map(|r| r.metrics.complexity.cyclomatic_complexity)
            .sum::<f64>() / file_count;

        aggregated.complexity.cognitive_complexity = results.iter()
            .map(|r| r.metrics.complexity.cognitive_complexity)
            .sum::<f64>() / file_count;

        aggregated.quality.duplication_percentage = results.iter()
            .map(|r| r.metrics.quality.duplication_percentage)
            .sum::<f64>() / file_count;

        aggregated.quality.test_coverage = results.iter()
            .map(|r| r.metrics.quality.test_coverage)
            .sum::<f64>() / file_count;

        // Sum issue counts.
        aggregated.quality.code_smells = results.iter()
            .map(|r| r.metrics.quality.code_smells)
            .sum();

        aggregated.quality.security_issues = results.iter()
            .map(|r| r.metrics.quality.security_issues)
            .sum();

        Ok(aggregated)
    }

    /// Extract the deduplicated set of languages seen across results,
    /// sorted so the output is deterministic (HashSet order is not).
    fn extract_languages(&self, results: &[AnalysisResult]) -> Vec<String> {
        let mut languages = std::collections::HashSet::new();
        for result in results {
            languages.extend(result.metadata.languages.iter().cloned());
        }
        let mut languages: Vec<String> = languages.into_iter().collect();
        languages.sort();
        languages
    }
}

impl TrendAnalyzer {
    /// Check if we have sufficient data for trend analysis.
    ///
    /// Placeholder: always reports `true`, so `generate_report` currently
    /// attempts trend analysis unconditionally. The real check belongs in
    /// the trends module.
    pub fn has_sufficient_data(&self) -> bool {
        // We'll implement this properly in the trends module
        true // Placeholder
    }
}

/// Mock syntax node for testing
/// Mock syntax node used in place of a real parse tree (see
/// `analyze_file_from_path`): a single leaf node holding the whole file.
#[derive(Debug)]
struct MockSyntaxNode {
    // Node kind label, e.g. "program".
    kind: String,
    // Full source text this node spans.
    text: String,
}

impl MockSyntaxNode {
    fn new(kind: &str, text: String) -> Self {
        Self {
            kind: kind.to_string(),
            text,
        }
    }
}

impl SyntaxNode for MockSyntaxNode {
    // Node kind label supplied at construction (e.g. "program").
    fn kind(&self) -> &str {
        &self.kind
    }

    // Full source text of the node.
    fn text(&self) -> &str {
        &self.text
    }

    // The mock node always starts at the beginning of the file.
    fn start_position(&self) -> codegraph_core::Position {
        codegraph_core::Position { row: 0, column: 0 }
    }

    // End position derived from the text: last line index and its length.
    // NOTE(review): `str::lines` ignores a trailing newline, so text ending
    // in '\n' reports the last non-empty line's end rather than a fresh
    // line at column 0 — confirm which convention the trait expects.
    fn end_position(&self) -> codegraph_core::Position {
        let lines = self.text.lines().count();
        let last_line_len = self.text.lines().last().map(|l| l.len()).unwrap_or(0);
        codegraph_core::Position {
            row: lines.saturating_sub(1) as u32,
            column: last_line_len as u32
        }
    }

    // Byte offsets span the entire text.
    fn start_byte(&self) -> u32 {
        0
    }

    // NOTE(review): `as u32` silently truncates for texts > 4 GiB; the trait
    // fixes the return type, so this is acceptable but worth knowing.
    fn end_byte(&self) -> u32 {
        self.text.len() as u32
    }

    // Leaf node: no children at any index.
    fn child(&self, _index: usize) -> Option<Box<dyn SyntaxNode>> {
        None
    }

    fn child_count(&self) -> usize {
        0
    }

    fn children(&self) -> Vec<Box<dyn SyntaxNode>> {
        Vec::new() // Simplified for mock
    }
}
