//! Kafka consumer implementation.
//!
//! Provides high-performance, scalable Kafka message consumption.

use std::time::Duration;
use std::collections::HashMap;
use std::sync::Arc;
use rdkafka::{
    config::ClientConfig,
    consumer::{StreamConsumer, Consumer, CommitMode},
    error::KafkaError,
    message::{Message, BorrowedMessage},
    util::Timeout,
    TopicPartitionList,
};
use anyhow::{Result, anyhow};
use tracing::{info, error, warn, debug};
use tokio::sync::mpsc;
use async_trait::async_trait;

use crate::kafka::config::{KafkaConfig, ConsumerConfig};
use crate::kafka::messages::KafkaMessage;

/// 消息处理器特征
#[async_trait]
pub trait MessageHandler: Send + Sync {
    /// Handle one successfully deserialized message.
    ///
    /// Returning `Err` is logged by the consumer loop but does not stop
    /// consumption (see `KafkaConsumer::consume_loop`).
    async fn handle_message(&self, message: KafkaMessage) -> Result<()>;

    /// Handle a message that could not be processed (e.g. invalid UTF-8 or a
    /// deserialization failure). Default implementation just logs at `error`
    /// level; implementors may override to dead-letter or alert.
    async fn handle_error(&self, topic: &str, partition: i32, offset: i64, error: &str) {
        error!("消息处理错误 - Topic: {}, Partition: {}, Offset: {}, Error: {}", 
               topic, partition, offset, error);
    }
}

/// Kafka消费者
pub struct KafkaConsumer {
    /// Underlying rdkafka streaming consumer.
    consumer: StreamConsumer,
    /// Full Kafka configuration (brokers, topics, consumer group settings).
    config: KafkaConfig,
    /// Callback invoked for every received message.
    handler: Arc<dyn MessageHandler>,
    /// Run flag; `stop()` sets it to `false` to terminate the consume loop.
    running: Arc<tokio::sync::RwLock<bool>>,
}

/// Kafka消费者构建器
pub struct KafkaConsumerBuilder {
    /// Application-level configuration, stored on the built consumer.
    config: KafkaConfig,
    /// rdkafka client properties derived from `config` plus builder overrides.
    client_config: ClientConfig,
    /// Message handler; must be set before `build()` succeeds.
    handler: Option<Arc<dyn MessageHandler>>,
}

impl KafkaConsumerBuilder {
    /// Create a new builder, pre-populating the rdkafka client configuration
    /// from `config` (brokers, security, consumer group settings).
    pub fn new(config: KafkaConfig) -> Self {
        let mut client_config = ClientConfig::new();

        // Base configuration.
        client_config.set("bootstrap.servers", &config.brokers.join(","));

        // Security configuration; SASL fields are optional.
        client_config.set("security.protocol", &config.security.protocol);
        if let Some(mechanism) = &config.security.sasl_mechanism {
            client_config.set("sasl.mechanism", mechanism);
        }
        if let Some(username) = &config.security.sasl_username {
            client_config.set("sasl.username", username);
        }
        if let Some(password) = &config.security.sasl_password {
            client_config.set("sasl.password", password);
        }

        // Consumer group configuration.
        client_config.set("group.id", &config.consumer.group_id);
        client_config.set("auto.offset.reset", &config.consumer.auto_offset_reset);
        client_config.set("enable.auto.commit", &config.consumer.enable_auto_commit.to_string());
        client_config.set("auto.commit.interval.ms", &config.consumer.auto_commit_interval_ms.to_string());
        client_config.set("session.timeout.ms", &config.consumer.session_timeout_ms.to_string());
        // BUGFIX: `max.poll.records` is a Java-client-only property. librdkafka
        // rejects unknown properties when the consumer is created, so forwarding
        // it here made `build()` always fail with "No such configuration
        // property". librdkafka has no direct equivalent, so
        // `config.consumer.max_poll_records` is intentionally not forwarded.

        // Extra caller-supplied properties (may override the defaults above).
        for (key, value) in &config.consumer.additional_properties {
            client_config.set(key, value);
        }

        Self {
            config,
            client_config,
            handler: None,
        }
    }

    /// Set the Kafka client ID (`client.id`).
    pub fn client_id(mut self, client_id: &str) -> Self {
        self.client_config.set("client.id", client_id);
        self
    }

    /// Set the message handler invoked for every consumed message. Required.
    pub fn handler<H: MessageHandler + 'static>(mut self, handler: H) -> Self {
        self.handler = Some(Arc::new(handler));
        self
    }

    /// Set the maximum poll interval (`max.poll.interval.ms`) in milliseconds.
    pub fn max_poll_interval_ms(mut self, interval_ms: u32) -> Self {
        self.client_config.set("max.poll.interval.ms", &interval_ms.to_string());
        self
    }

    /// Build the consumer.
    ///
    /// # Errors
    /// Fails if no handler was set, or if rdkafka rejects the accumulated
    /// client configuration.
    pub fn build(self) -> Result<KafkaConsumer> {
        let handler = self.handler
            .ok_or_else(|| anyhow!("必须设置消息处理器"))?;

        info!("创建Kafka消费者，Group ID: {}, brokers: {:?}", 
              self.config.consumer.group_id, self.config.brokers);

        let consumer: StreamConsumer = self.client_config
            .create()
            .map_err(|e| anyhow!("创建Kafka消费者失败: {}", e))?;

        Ok(KafkaConsumer {
            consumer,
            config: self.config,
            handler,
            running: Arc::new(tokio::sync::RwLock::new(false)),
        })
    }
}

impl KafkaConsumer {
    /// 订阅主题并开始消费
    pub async fn subscribe_and_consume(&self, topics: &[&str]) -> Result<()> {
        info!("订阅Kafka主题: {:?}", topics);
        
        self.consumer.subscribe(topics)
            .map_err(|e| anyhow!("订阅主题失败: {}", e))?;
        
        {
            let mut running = self.running.write().await;
            *running = true;
        }
        
        self.consume_loop().await
    }
    
    /// 消费所有沙箱相关主题
    pub async fn consume_all_topics(&self) -> Result<()> {
        let topics = vec![
            self.config.topics.sandbox_events.as_str(),
            self.config.topics.execution_tasks.as_str(),
            self.config.topics.system_metrics.as_str(),
            self.config.topics.error_reports.as_str(),
            self.config.topics.health_checks.as_str(),
        ];
        
        self.subscribe_and_consume(&topics).await
    }
    
    /// 消费循环
    async fn consume_loop(&self) -> Result<()> {
        info!("开始Kafka消息消费循环");
        
        loop {
            // 检查是否应该停止
            {
                let running = self.running.read().await;
                if !*running {
                    info!("收到停止信号，退出消费循环");
                    break;
                }
            }
            
            // 接收消息
            match self.consumer.recv().await {
                Ok(message) => {
                    if let Err(e) = self.handle_kafka_message(&message).await {
                        error!("处理Kafka消息失败: {}", e);
                        // 继续处理下一条消息，不中断消费循环
                    }
                    
                    // 手动提交偏移量（如果禁用了自动提交）
                    if !self.config.consumer.enable_auto_commit {
                        if let Err(e) = self.consumer.commit_message(&message, CommitMode::Async) {
                            warn!("提交偏移量失败: {}", e);
                        }
                    }
                }
                Err(e) => {
                    error!("接收Kafka消息失败: {}", e);
                    tokio::time::sleep(Duration::from_secs(1)).await;
                }
            }
        }
        
        Ok(())
    }
    
    /// 处理单个Kafka消息
    async fn handle_kafka_message(&self, message: &BorrowedMessage<'_>) -> Result<()> {
        let topic = message.topic();
        let partition = message.partition();
        let offset = message.offset();
        
        debug!("收到Kafka消息 - Topic: {}, Partition: {}, Offset: {}", 
               topic, partition, offset);
        
        // 获取消息载荷
        let payload = match message.payload() {
            Some(p) => p,
            None => {
                warn!("收到空消息载荷 - Topic: {}, Partition: {}, Offset: {}", 
                      topic, partition, offset);
                return Ok(());
            }
        };
        
        // 解析JSON消息
        let payload_str = match std::str::from_utf8(payload) {
            Ok(s) => s,
            Err(e) => {
                let error_msg = format!("消息不是有效的UTF-8: {}", e);
                self.handler.handle_error(topic, partition, offset, &error_msg).await;
                return Ok(());
            }
        };
        
        // 反序列化消息
        let kafka_message = match KafkaMessage::from_json(payload_str) {
            Ok(msg) => msg,
            Err(e) => {
                let error_msg = format!("反序列化消息失败: {}", e);
                self.handler.handle_error(topic, partition, offset, &error_msg).await;
                return Ok(());
            }
        };
        
        // 处理消息
        self.handler.handle_message(kafka_message).await
    }
    
    /// 停止消费
    pub async fn stop(&self) -> Result<()> {
        info!("停止Kafka消费者");
        
        {
            let mut running = self.running.write().await;
            *running = false;
        }
        
        // 取消订阅
        self.consumer.unsubscribe();
        
        Ok(())
    }
    
    /// 获取消费者组成员信息
    pub async fn get_group_metadata(&self) -> Result<HashMap<String, String>> {
        let metadata = self.consumer.fetch_group_list(None, Duration::from_secs(10))
            .map_err(|e| anyhow!("获取消费者组元数据失败: {}", e))?;
        
        let mut result = HashMap::new();
        
        for group in metadata.groups() {
            result.insert(
                format!("group_{}", group.name()),
                format!("state: {:?}, members: {}", group.state(), group.members().len())
            );
        }
        
        Ok(result)
    }
    
    /// 获取主题分区偏移量信息
    pub async fn get_offsets_for_times(&self, topics: &[&str], timestamp: i64) -> Result<HashMap<String, Vec<(i32, i64)>>> {
        let mut topic_partition_list = TopicPartitionList::new();
        
        // 添加主题到查询列表
        for topic in topics {
            topic_partition_list.add_partition_offset(topic, 0, rdkafka::Offset::Offset(timestamp))
                .map_err(|e| anyhow!("添加主题分区失败: {}", e))?;
        }
        
        let offsets = self.consumer.offsets_for_times(topic_partition_list, Duration::from_secs(10))
            .map_err(|e| anyhow!("查询偏移量失败: {}", e))?;
        
        let mut result = HashMap::new();
        
        for element in offsets.elements() {
            let topic = element.topic();
            let partition = element.partition();
            
            let offset = match element.offset() {
                rdkafka::Offset::Offset(o) => o,
                _ => -1,
            };
            
            result.entry(topic.to_string())
                .or_insert_with(Vec::new)
                .push((partition, offset));
        }
        
        Ok(result)
    }
    
    /// 获取配置
    pub fn config(&self) -> &KafkaConfig {
        &self.config
    }
}

/// 默认消息处理器实现
/// Default `MessageHandler` implementation that simply logs every message.
pub struct DefaultMessageHandler {
    /// Name used to tag log lines emitted by this handler.
    component_name: String,
}

impl DefaultMessageHandler {
    /// Create a handler tagged with `component_name` in its log output.
    pub fn new(component_name: &str) -> Self {
        let component_name = component_name.to_owned();
        Self { component_name }
    }
}

#[async_trait]
impl MessageHandler for DefaultMessageHandler {
    async fn handle_message(&self, message: KafkaMessage) -> Result<()> {
        info!("[{}] 收到消息: {} ({})", 
              self.component_name, 
              message.message_type(), 
              message.header().message_id);
        
        match message {
            KafkaMessage::SandboxEvent(event) => {
                info!("沙箱事件: {:?} - 沙箱ID: {}", event.event_type, event.sandbox_id);
            }
            KafkaMessage::ExecutionTask(task) => {
                info!("执行任务: {} - 语言: {:?}", task.task_id, task.execution_request.language);
            }
            KafkaMessage::SystemMetrics(metrics) => {
                info!("系统指标: {:?} - {} 个指标", metrics.metrics_type, metrics.metrics.len());
            }
            KafkaMessage::ErrorReport(error) => {
                warn!("错误报告: {:?} - {}", error.level, error.error_message);
            }
            KafkaMessage::HealthCheck(health) => {
                info!("健康检查: {} - {:?}", health.component, health.status);
            }
        }
        
        Ok(())
    }
}

#[cfg(test)]
mod tests {
    use super::*;
    use crate::kafka::messages::*;

    /// Handler that records the type of every message it receives.
    struct TestMessageHandler {
        received_messages: Arc<tokio::sync::Mutex<Vec<String>>>,
    }

    impl TestMessageHandler {
        fn new() -> Self {
            Self {
                received_messages: Arc::new(tokio::sync::Mutex::new(Vec::new())),
            }
        }

        /// Snapshot of the message types recorded so far.
        #[allow(dead_code)]
        async fn get_received_messages(&self) -> Vec<String> {
            self.received_messages.lock().await.clone()
        }
    }

    #[async_trait]
    impl MessageHandler for TestMessageHandler {
        async fn handle_message(&self, message: KafkaMessage) -> Result<()> {
            let mut messages = self.received_messages.lock().await;
            messages.push(message.message_type().to_string());
            Ok(())
        }
    }

    #[tokio::test]
    async fn test_kafka_consumer_builder() {
        let config = KafkaConfig::default();
        let handler = TestMessageHandler::new();
        // `_builder` silences the unused-variable warning: building the
        // consumer requires a live Kafka instance, so only construction of
        // the builder itself is exercised here.
        let _builder = KafkaConsumerBuilder::new(config)
            .client_id("test-consumer")
            .handler(handler);

        // NOTE: building requires an actual Kafka broker:
        // let consumer = builder.build().unwrap();
        // assert_eq!(consumer.config().consumer.group_id, "koda-sandbox-manager");
    }

    #[test]
    fn test_default_message_handler() {
        let handler = DefaultMessageHandler::new("test-component");
        assert_eq!(handler.component_name, "test-component");
    }
}