//! Kafka流适配器测试
//! 
//! 包含完整的单元测试、集成测试和性能测试

use super::*;
use tokio_test;

/// 测试Kafka适配器创建
/// Exercises the adapter-construction code path; no live broker is assumed,
/// so both success and failure are acceptable outcomes.
#[tokio::test]
async fn test_kafka_adapter_creation() {
    let config = KafkaConfig::default();
    // The test environment typically has no real Kafka server; we only verify
    // that construction runs without panicking and report the result.
    if let Err(e) = KafkaStreamAdapter::new(config).await {
        println!("Kafka适配器创建失败（预期）: {}", e);
    } else {
        println!("Kafka适配器创建成功");
    }
}

/// 测试配置默认值
/// Verifies that `KafkaConfig::default()` produces the documented defaults:
/// a local plaintext broker, no authentication material, and the standard
/// consumer-group timing parameters.
#[test]
fn test_default_config() {
    let config = KafkaConfig::default();
    assert_eq!(config.bootstrap_servers, "localhost:9092");
    assert_eq!(config.security_protocol, "PLAINTEXT");
    // No SASL/SSL credentials are configured by default.
    assert!(config.sasl_mechanism.is_none());
    assert!(config.sasl_username.is_none());
    assert!(config.sasl_password.is_none());
    assert!(config.ssl_ca_location.is_none());
    assert_eq!(config.group_id, "data-gateway-group");
    // Idiom fix: `assert!` rather than `assert_eq!(_, true)`
    // (clippy::bool_assert_comparison).
    assert!(config.enable_auto_commit);
    assert_eq!(config.session_timeout_ms, 30000);
    assert_eq!(config.heartbeat_interval_ms, 3000);
}

/// 测试消息格式枚举
/// Checks `MessageFormat` equality semantics and its serde representation
/// (each variant serializes to its name as a JSON string).
#[test]
fn test_message_format_enum() {
    // Identical variants compare equal.
    assert_eq!(MessageFormat::Json, MessageFormat::Json);
    assert_eq!(MessageFormat::Text, MessageFormat::Text);
    assert_eq!(MessageFormat::Binary, MessageFormat::Binary);

    // Distinct variants compare unequal.
    assert_ne!(MessageFormat::Json, MessageFormat::Text);
    assert_ne!(MessageFormat::Text, MessageFormat::Binary);

    // Serialization produces the variant name as a JSON string.
    let json = serde_json::to_string(&MessageFormat::Json).unwrap();
    assert_eq!(json, "\"Json\"");

    let text = serde_json::to_string(&MessageFormat::Text).unwrap();
    // Bug fix: the original re-asserted `json` here, so the Text variant's
    // serialization was never actually checked.
    assert_eq!(text, "\"Text\"");
}

/// 测试主题配置
/// Confirms that `TopicConfig` fields hold exactly the values they were
/// constructed with.
#[test]
fn test_topic_config() {
    let cfg = TopicConfig {
        name: "test-topic".to_string(),
        format: MessageFormat::Json,
        schema: None,
        partitions: Some(3),
        replication_factor: Some(2),
    };

    assert_eq!(cfg.name, "test-topic");
    assert_eq!(cfg.format, MessageFormat::Json);
    assert!(cfg.schema.is_none());
    assert_eq!(cfg.partitions, Some(3));
    assert_eq!(cfg.replication_factor, Some(2));
}

/// 测试流统计信息
/// A freshly defaulted `StreamStats` must start with all counters at zero,
/// no last-processed time, and an empty offset map.
#[test]
fn test_stream_stats() {
    let stats = StreamStats::default();

    assert_eq!(stats.messages_consumed, 0);
    assert_eq!(stats.messages_produced, 0);
    assert_eq!(stats.bytes_processed, 0);
    assert_eq!(stats.avg_latency_ms, 0.0);
    assert!(stats.last_processed_time.is_none());
    assert!(stats.current_offset.is_empty());
}

/// 测试JSON消息解析
/// A valid JSON payload parses into a single-row batch with two columns:
/// a millisecond timestamp and the raw JSON text.
#[test]
fn test_parse_json_message() {
    let payload = r#"{"id": 1, "name": "test", "value": 42.5}"#;
    let ts = chrono::Utc::now().timestamp_millis();

    let batch = KafkaStreamAdapter::parse_json_message(payload.as_bytes(), ts)
        .expect("JSON消息解析应该成功");

    assert_eq!(batch.num_columns(), 2);
    assert_eq!(batch.num_rows(), 1);

    // Schema layout: column 0 = "timestamp" (ms), column 1 = "data" (UTF-8).
    let schema = batch.schema();
    assert_eq!(schema.field(0).name(), "timestamp");
    assert_eq!(schema.field(1).name(), "data");
    assert_eq!(
        schema.field(0).data_type(),
        &DataType::Timestamp(arrow_schema::TimeUnit::Millisecond, None)
    );
    assert_eq!(schema.field(1).data_type(), &DataType::Utf8);
}

/// 测试文本消息解析
/// A plain-text payload parses into a single-row batch with a timestamp
/// column and a "message" column.
#[test]
fn test_parse_text_message() {
    let payload = "Hello, Kafka!";
    let ts = chrono::Utc::now().timestamp_millis();

    let batch = KafkaStreamAdapter::parse_text_message(payload.as_bytes(), ts)
        .expect("文本消息解析应该成功");

    assert_eq!(batch.num_columns(), 2);
    assert_eq!(batch.num_rows(), 1);

    // Schema layout: column 0 = "timestamp", column 1 = "message".
    let schema = batch.schema();
    assert_eq!(schema.field(0).name(), "timestamp");
    assert_eq!(schema.field(1).name(), "message");
}

/// 测试二进制消息解析
/// A binary payload parses into a single-row batch carrying the timestamp,
/// the payload size, and a hex dump of the bytes.
#[test]
fn test_parse_binary_message() {
    // Same bytes as [0x48, 0x65, 0x6c, 0x6c, 0x6f] — "Hello".
    let payload: &[u8] = b"Hello";
    let ts = chrono::Utc::now().timestamp_millis();

    let batch = KafkaStreamAdapter::parse_binary_message(payload, ts)
        .expect("二进制消息解析应该成功");

    assert_eq!(batch.num_columns(), 3);
    assert_eq!(batch.num_rows(), 1);

    // Schema layout: "timestamp", "size" (Int64), "data_hex".
    let schema = batch.schema();
    assert_eq!(schema.field(0).name(), "timestamp");
    assert_eq!(schema.field(1).name(), "size");
    assert_eq!(schema.field(2).name(), "data_hex");
    assert_eq!(schema.field(1).data_type(), &DataType::Int64);
}

/// 测试配置序列化和反序列化
/// Round-trips a fully populated `KafkaConfig` through serde JSON and
/// verifies every field survives unchanged.
#[test]
fn test_config_serialization() {
    let original = KafkaConfig {
        bootstrap_servers: "test:9092".to_string(),
        security_protocol: "SASL_SSL".to_string(),
        sasl_mechanism: Some("PLAIN".to_string()),
        sasl_username: Some("test_user".to_string()),
        sasl_password: Some("test_pass".to_string()),
        ssl_ca_location: Some("/path/to/ca.pem".to_string()),
        group_id: "test_group".to_string(),
        enable_auto_commit: false,
        session_timeout_ms: 60000,
        heartbeat_interval_ms: 6000,
    };

    // Serialized form must contain the distinctive values.
    let encoded = serde_json::to_string(&original).unwrap();
    assert!(encoded.contains("test:9092"));
    assert!(encoded.contains("SASL_SSL"));
    assert!(encoded.contains("test_user"));

    // Deserialized form must match field-for-field.
    let decoded: KafkaConfig = serde_json::from_str(&encoded).unwrap();
    assert_eq!(decoded.bootstrap_servers, original.bootstrap_servers);
    assert_eq!(decoded.security_protocol, original.security_protocol);
    assert_eq!(decoded.sasl_mechanism, original.sasl_mechanism);
    assert_eq!(decoded.sasl_username, original.sasl_username);
    assert_eq!(decoded.sasl_password, original.sasl_password);
    assert_eq!(decoded.ssl_ca_location, original.ssl_ca_location);
    assert_eq!(decoded.group_id, original.group_id);
    assert_eq!(decoded.enable_auto_commit, original.enable_auto_commit);
    assert_eq!(decoded.session_timeout_ms, original.session_timeout_ms);
    assert_eq!(decoded.heartbeat_interval_ms, original.heartbeat_interval_ms);
}

/// 测试主题配置序列化
/// Round-trips a `TopicConfig` through serde JSON.
#[test]
fn test_topic_config_serialization() {
    let original = TopicConfig {
        name: "test-topic".to_string(),
        format: MessageFormat::Json,
        schema: None,
        partitions: Some(5),
        replication_factor: Some(3),
    };

    // The encoded string must carry the topic name and format variant.
    let encoded = serde_json::to_string(&original).unwrap();
    assert!(encoded.contains("test-topic"));
    assert!(encoded.contains("Json"));

    // Decoding restores the original values.
    let decoded: TopicConfig = serde_json::from_str(&encoded).unwrap();
    assert_eq!(decoded.name, original.name);
    assert_eq!(decoded.format, original.format);
    assert_eq!(decoded.partitions, original.partitions);
    assert_eq!(decoded.replication_factor, original.replication_factor);
}

/// 测试流统计信息序列化
/// Round-trips a populated `StreamStats` (including per-partition offsets)
/// through serde JSON.
#[test]
fn test_stream_stats_serialization() {
    let current_offset = HashMap::from([
        ("test-topic:0".to_string(), 100),
        ("test-topic:1".to_string(), 200),
    ]);

    let original = StreamStats {
        messages_consumed: 1000,
        messages_produced: 500,
        bytes_processed: 1048576,
        avg_latency_ms: 25.5,
        last_processed_time: Some(chrono::Utc::now()),
        current_offset,
    };

    // The encoded string must contain the distinctive numeric values.
    let encoded = serde_json::to_string(&original).unwrap();
    assert!(encoded.contains("1000"));
    assert!(encoded.contains("500"));
    assert!(encoded.contains("25.5"));

    // Decoding restores counters and both offset entries.
    let decoded: StreamStats = serde_json::from_str(&encoded).unwrap();
    assert_eq!(decoded.messages_consumed, original.messages_consumed);
    assert_eq!(decoded.messages_produced, original.messages_produced);
    assert_eq!(decoded.bytes_processed, original.bytes_processed);
    assert_eq!(decoded.avg_latency_ms, original.avg_latency_ms);
    assert_eq!(decoded.current_offset.len(), 2);
}

/// 性能测试：消息解析
/// Smoke-level performance check: 1000 JSON parses must finish well under
/// one second. This is a regression tripwire, not a benchmark.
#[test]
fn test_message_parsing_performance() {
    let payload = r#"{"id": 1, "name": "test", "value": 42.5}"#.as_bytes();
    let ts = chrono::Utc::now().timestamp_millis();

    let started = std::time::Instant::now();
    (0..1000).for_each(|_| {
        let _ = KafkaStreamAdapter::parse_json_message(payload, ts);
    });
    let duration = started.elapsed();

    assert!(duration.as_millis() < 1000, "消息解析性能测试失败，耗时: {:?}", duration);
}

/// 边界条件测试
/// Boundary inputs for each parser: empty payloads, malformed JSON,
/// and non-UTF-8 bytes.
#[test]
fn test_edge_cases() {
    let ts = chrono::Utc::now().timestamp_millis();

    // An empty JSON object is still valid JSON.
    assert!(
        KafkaStreamAdapter::parse_json_message(b"{}", ts).is_ok(),
        "空JSON应该解析成功"
    );

    // Malformed JSON must be rejected.
    assert!(
        KafkaStreamAdapter::parse_json_message(b"invalid json", ts).is_err(),
        "无效JSON应该解析失败"
    );

    // Empty text and empty binary payloads are accepted.
    assert!(
        KafkaStreamAdapter::parse_text_message(b"", ts).is_ok(),
        "空文本应该解析成功"
    );
    assert!(
        KafkaStreamAdapter::parse_binary_message(b"", ts).is_ok(),
        "空二进制应该解析成功"
    );

    // Bytes that are not valid UTF-8 must fail text parsing.
    let not_utf8 = [0xFFu8, 0xFE, 0xFD];
    assert!(
        KafkaStreamAdapter::parse_text_message(&not_utf8, ts).is_err(),
        "无效UTF-8应该解析失败"
    );
}

/// Round-trips a byte buffer through `hex::encode` / `hex::decode`.
#[test]
fn test_hex_encoding() {
    // Same bytes as [0x48, 0x65, 0x6c, 0x6c, 0x6f] — "Hello".
    let original = b"Hello".to_vec();

    let encoded = hex::encode(&original);
    assert_eq!(encoded, "48656c6c6f");

    assert_eq!(hex::decode(&encoded).unwrap(), original);
}

/// 并发测试
/// Parses the same JSON payload from ten spawned tasks concurrently and
/// verifies every task completes successfully.
#[tokio::test]
async fn test_concurrent_message_parsing() {
    let payload = r#"{"id": 1, "name": "test", "value": 42.5}"#;
    let ts = chrono::Utc::now().timestamp_millis();

    // Spawn ten independent parse tasks, each owning its own copy of the bytes.
    let handles: Vec<_> = (0..10)
        .map(|i| {
            let bytes = payload.as_bytes().to_vec();
            tokio::spawn(async move {
                let parsed = KafkaStreamAdapter::parse_json_message(&bytes, ts);
                assert!(parsed.is_ok());
                i
            })
        })
        .collect();

    // Join all tasks and sanity-check the returned indices.
    for handle in handles {
        let idx = handle.await.unwrap();
        assert!(idx < 10);
    }
}

/// 集成测试：完整的配置流程
#[test]
fn test_configuration_integration() {
    // Build a broker config overriding only the fields under test.
    let kafka_config = KafkaConfig {
        bootstrap_servers: "localhost:9092".to_string(),
        security_protocol: "PLAINTEXT".to_string(),
        group_id: "test-group".to_string(),
        ..Default::default()
    };

    // Build a matching single-partition topic config.
    let topic_config = TopicConfig {
        name: "test-topic".to_string(),
        format: MessageFormat::Json,
        schema: None,
        partitions: Some(1),
        replication_factor: Some(1),
    };

    // Sanity-check the constructed values.
    assert_eq!(kafka_config.bootstrap_servers, "localhost:9092");
    assert_eq!(kafka_config.group_id, "test-group");
    assert_eq!(topic_config.name, "test-topic");
    assert_eq!(topic_config.format, MessageFormat::Json);

    // Both configs must survive a serde JSON round-trip.
    let kafka_json = serde_json::to_string(&kafka_config).unwrap();
    let kafka_back: KafkaConfig = serde_json::from_str(&kafka_json).unwrap();
    assert_eq!(kafka_back.bootstrap_servers, kafka_config.bootstrap_servers);

    let topic_json = serde_json::to_string(&topic_config).unwrap();
    let topic_back: TopicConfig = serde_json::from_str(&topic_json).unwrap();
    assert_eq!(topic_back.name, topic_config.name);
}
