use codegraph_core::{PerformanceMonitor, MonitorConfig};
use std::time::Duration;

#[tokio::test]
async fn test_performance_monitor_creation() {
    // A freshly constructed monitor must report zero activity across the board.
    let perf = PerformanceMonitor::new();

    let snapshot = perf
        .get_performance_report()
        .await
        .expect("Failed to get performance report");

    assert_eq!(snapshot.parse_stats.total_operations, 0);
    assert_eq!(snapshot.graph_update_stats.total_updates, 0);
    assert_eq!(snapshot.error_stats.total_errors, 0);

    println!("Performance monitor created successfully");
    println!("Initial uptime: {} seconds", snapshot.uptime_seconds);
}

#[tokio::test]
async fn test_performance_monitor_with_config() {
    // Building with an explicit config must still start from a clean slate.
    let custom = MonitorConfig {
        max_samples: 100,
        sampling_interval_ms: 500,
        enable_profiling: true,
        enable_memory_tracking: true,
        enable_io_tracking: false,
    };

    let perf = PerformanceMonitor::with_config(custom);
    let snapshot = perf
        .get_performance_report()
        .await
        .expect("Failed to get performance report");

    assert_eq!(snapshot.parse_stats.total_operations, 0);
    println!("Performance monitor with custom config created successfully");
}

#[tokio::test]
async fn test_parse_operation_recording() {
    // Record two successful parses and one failure, then verify the
    // aggregate counters reflect exactly what was fed in.
    let perf = PerformanceMonitor::new();

    perf.record_parse_operation(
        Duration::from_millis(100),
        1024,
        String::from("python"),
        true,
        50,
        None,
    )
    .await
    .expect("Failed to record parse operation");

    perf.record_parse_operation(
        Duration::from_millis(200),
        2048,
        String::from("rust"),
        true,
        75,
        None,
    )
    .await
    .expect("Failed to record parse operation");

    perf.record_parse_operation(
        Duration::from_millis(50),
        512,
        String::from("javascript"),
        false,
        0,
        Some(String::from("Parse error")),
    )
    .await
    .expect("Failed to record parse operation");

    // Pull the aggregated view and dump it for inspection.
    let snapshot = perf
        .get_performance_report()
        .await
        .expect("Failed to get performance report");

    let parse = &snapshot.parse_stats;
    println!("Parse operation recording test:");
    println!("  Total operations: {}", parse.total_operations);
    println!("  Successful operations: {}", parse.successful_operations);
    println!("  Failed operations: {}", parse.failed_operations);
    println!("  Average duration: {:.2}ms", parse.average_duration_ms);
    println!("  Min duration: {}ms", parse.min_duration_ms);
    println!("  Max duration: {}ms", parse.max_duration_ms);
    println!("  Total bytes parsed: {}", parse.total_bytes_parsed);
    println!("  Operations per second: {:.2}", parse.operations_per_second);

    assert_eq!(parse.total_operations, 3);
    assert_eq!(parse.successful_operations, 2);
    assert_eq!(parse.failed_operations, 1);
    assert_eq!(parse.total_bytes_parsed, 1024 + 2048 + 512);
    assert_eq!(parse.min_duration_ms, 50);
    assert_eq!(parse.max_duration_ms, 200);
}

#[tokio::test]
async fn test_graph_update_recording() {
    // Record two successful graph updates and check the node/edge
    // totals sum across both operations.
    let perf = PerformanceMonitor::new();

    perf.record_graph_update(
        Duration::from_millis(150),
        10, // nodes_added
        2,  // nodes_removed
        5,  // nodes_modified
        15, // edges_added
        3,  // edges_removed
        true,
    )
    .await
    .expect("Failed to record graph update");

    perf.record_graph_update(
        Duration::from_millis(75),
        5,  // nodes_added
        1,  // nodes_removed
        3,  // nodes_modified
        8,  // edges_added
        2,  // edges_removed
        true,
    )
    .await
    .expect("Failed to record graph update");

    let snapshot = perf
        .get_performance_report()
        .await
        .expect("Failed to get performance report");

    let updates = &snapshot.graph_update_stats;
    println!("Graph update recording test:");
    println!("  Total updates: {}", updates.total_updates);
    println!("  Successful updates: {}", updates.successful_updates);
    println!("  Failed updates: {}", updates.failed_updates);
    println!("  Average duration: {:.2}ms", updates.average_duration_ms);
    println!("  Total nodes processed: {}", updates.total_nodes_processed);
    println!("  Total edges processed: {}", updates.total_edges_processed);
    println!("  Updates per second: {:.2}", updates.updates_per_second);

    assert_eq!(updates.total_updates, 2);
    assert_eq!(updates.successful_updates, 2);
    assert_eq!(updates.failed_updates, 0);
    assert_eq!(updates.total_nodes_processed, (10 + 2 + 5) + (5 + 1 + 3));
    assert_eq!(updates.total_edges_processed, (15 + 3) + (8 + 2));
}

#[tokio::test]
async fn test_custom_metrics() {
    // Smoke test: named metrics can be recorded without error, including
    // repeated samples under the same key.
    let perf = PerformanceMonitor::new();

    perf.record_custom_metric(String::from("query_latency"), 45.5)
        .await
        .expect("Failed to record custom metric");

    perf.record_custom_metric(String::from("cache_hit_rate"), 0.85)
        .await
        .expect("Failed to record custom metric");

    perf.record_custom_metric(String::from("query_latency"), 32.1)
        .await
        .expect("Failed to record custom metric");

    // The standard report does not surface custom metrics; a full
    // implementation would expose them through a dedicated API.

    println!("Custom metrics recording test completed");
    println!("Recorded query_latency and cache_hit_rate metrics");
}

#[tokio::test]
async fn test_memory_and_system_metrics() {
    // Exercise the memory/system sampling entry points; values are printed
    // but not asserted since the collectors are not fully implemented yet.
    let perf = PerformanceMonitor::new();

    perf.record_memory_usage()
        .await
        .expect("Failed to record memory usage");

    perf.record_system_metrics()
        .await
        .expect("Failed to record system metrics");

    let snapshot = perf
        .get_performance_report()
        .await
        .expect("Failed to get performance report");

    println!("Memory and system metrics test:");
    println!("  Current memory usage: {} bytes", snapshot.memory_stats.current_usage_bytes);
    println!("  Peak memory usage: {} bytes", snapshot.memory_stats.peak_usage_bytes);
    println!("  Average CPU: {:.2}%", snapshot.system_stats.average_cpu_percent);
    println!("  Peak CPU: {:.2}%", snapshot.system_stats.peak_cpu_percent);

    // NOTE: these currently read back 0 — only the framework exists so far.
}

#[tokio::test]
async fn test_error_tracking() {
    // A failed parse and a failed graph update should both show up in the
    // error statistics, each under its own error-type key.
    let perf = PerformanceMonitor::new();

    perf.record_parse_operation(
        Duration::from_millis(100),
        1024,
        String::from("python"),
        false,
        0,
        Some(String::from("Syntax error")),
    )
    .await
    .expect("Failed to record parse operation");

    perf.record_graph_update(Duration::from_millis(50), 0, 0, 0, 0, 0, false)
        .await
        .expect("Failed to record graph update");

    let snapshot = perf
        .get_performance_report()
        .await
        .expect("Failed to get performance report");

    let errors = &snapshot.error_stats;
    println!("Error tracking test:");
    println!("  Total errors: {}", errors.total_errors);
    println!("  Error rate: {:.2}%", errors.error_rate_percent);
    println!("  Errors by type: {:?}", errors.errors_by_type);

    assert!(errors.total_errors > 0);
    assert!(errors.errors_by_type.contains_key("parse_error"));
    assert!(errors.errors_by_type.contains_key("graph_update_error"));
}

#[tokio::test]
async fn test_operation_statistics() {
    // One successful parse plus one successful graph update should appear
    // in the per-type operation breakdown under their success keys.
    let perf = PerformanceMonitor::new();

    perf.record_parse_operation(
        Duration::from_millis(100),
        1024,
        String::from("python"),
        true,
        50,
        None,
    )
    .await
    .expect("Failed to record parse operation");

    perf.record_graph_update(Duration::from_millis(75), 5, 1, 3, 8, 2, true)
        .await
        .expect("Failed to record graph update");

    let snapshot = perf
        .get_performance_report()
        .await
        .expect("Failed to get performance report");

    let ops = &snapshot.operation_stats;
    println!("Operation statistics test:");
    println!("  Total operations: {}", ops.total_operations);
    println!("  Operations by type: {:?}", ops.operations_by_type);
    println!("  Operations per minute: {:.2}", ops.operations_per_minute);

    assert!(ops.total_operations > 0);
    assert!(ops.operations_by_type.contains_key("parse_success"));
    assert!(ops.operations_by_type.contains_key("graph_update_success"));
}

#[tokio::test]
async fn test_metrics_clearing() {
    // clear_metrics() must reset all counters back to zero after activity
    // has been recorded.
    let perf = PerformanceMonitor::new();

    perf.record_parse_operation(
        Duration::from_millis(100),
        1024,
        String::from("python"),
        true,
        50,
        None,
    )
    .await
    .expect("Failed to record parse operation");

    // Confirm the operation landed before clearing.
    let before = perf
        .get_performance_report()
        .await
        .expect("Failed to get performance report");
    assert_eq!(before.parse_stats.total_operations, 1);

    perf.clear_metrics().await.expect("Failed to clear metrics");

    // Everything should read zero again.
    let after = perf
        .get_performance_report()
        .await
        .expect("Failed to get performance report");
    assert_eq!(after.parse_stats.total_operations, 0);
    assert_eq!(after.error_stats.total_errors, 0);
    assert_eq!(after.operation_stats.total_operations, 0);

    println!("Metrics clearing test passed");
    println!("Operations before clear: {}", before.parse_stats.total_operations);
    println!("Operations after clear: {}", after.parse_stats.total_operations);
}
