package com.datagateway.route;

import com.datagateway.component.DataBuffer;
import com.datagateway.component.GroovyScriptEngine;
import com.datagateway.config.DataGatewayProperties;
import com.datagateway.model.ProcessedData;
import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.camel.Exchange;
import org.apache.camel.LoggingLevel;
import org.apache.camel.Processor;
import org.apache.camel.builder.RouteBuilder;
import org.apache.camel.component.kafka.KafkaConstants;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;

import java.time.LocalDateTime;
import java.util.Map;
import java.util.UUID;

/**
 * Data processing route.
 * Implements the Kafka-to-Hive data flow with Apache Camel: messages are
 * consumed from Kafka, transformed by a Groovy script, enriched with
 * processing metadata, and handed off to the in-memory {@link DataBuffer}.
 *
 * @author Data Gateway Team
 * @version 1.0.0
 */
@Component
public class DataProcessingRoute extends RouteBuilder {

    private static final Logger logger = LoggerFactory.getLogger(DataProcessingRoute.class);

    /** Target type for the transformed JSON payload; avoids an unchecked raw-Map cast. */
    private static final TypeReference<Map<String, Object>> MAP_TYPE =
            new TypeReference<Map<String, Object>>() {};

    @Autowired
    private DataGatewayProperties properties;

    @Autowired
    private GroovyScriptEngine groovyScriptEngine;

    @Autowired
    private DataBuffer dataBuffer;

    @Autowired
    private ObjectMapper objectMapper;

    /**
     * Configures the Camel routes: the Kafka consumer pipeline, the buffer
     * hand-off route, and the dead-letter error-handling route.
     *
     * @throws Exception if route construction fails
     */
    @Override
    public void configure() throws Exception {

        // Kafka consumer route: consume, transform, enrich, buffer.
        from(buildKafkaConsumerUri())
            .routeId("kafka-data-consumer")
            .description("从Kafka消费数据并进行处理")

            // Route-scoped error handling: retry 3 times, then dead-letter.
            .errorHandler(deadLetterChannel("direct:error-handler")
                .maximumRedeliveries(3)
                .redeliveryDelay(1000)
                .retryAttemptedLogLevel(LoggingLevel.WARN))

            // Stamp Kafka metadata and a generated data ID onto the message.
            .process(new DataPreProcessor())

            // Transform the payload via the Groovy script engine.
            .process(new DataTransformProcessor())

            // Record processing time and timestamp.
            .process(new DataPostProcessor())

            // Hand the processed message to the buffer route.
            .to("direct:data-buffer")

            // Log completion for traceability.
            .log("数据处理完成: ${header.dataId}");

        // Buffer route: wrap the exchange into a ProcessedData and enqueue it.
        from("direct:data-buffer")
            .routeId("data-buffer-processor")
            .description("将处理后的数据发送到缓冲区")
            .process(new DataBufferProcessor());

        // Error route: record failed messages for error statistics.
        from("direct:error-handler")
            .routeId("error-handler")
            .description("处理数据流中的错误")
            .process(new ErrorHandlerProcessor());
    }

    /**
     * Builds the Kafka consumer endpoint URI from the configured topics and
     * consumer settings.
     *
     * @return the Camel Kafka consumer URI
     */
    private String buildKafkaConsumerUri() {
        // Topics are comma-separated in the endpoint path: kafka:topicA,topicB?...
        StringBuilder uri = new StringBuilder("kafka:")
                .append(String.join(",", properties.getKafka().getTopics()));

        // Consumer configuration; brokers are resolved from the property placeholder.
        uri.append("?brokers=").append("{{camel.component.kafka.brokers}}")
           .append("&groupId=").append(properties.getKafka().getGroupId())
           .append("&autoOffsetReset=latest")
           .append("&autoCommitEnable=true")
           .append("&autoCommitIntervalMs=1000")
           .append("&sessionTimeoutMs=").append(properties.getKafka().getSessionTimeoutMs())
           .append("&maxPollRecords=").append(properties.getKafka().getMaxPollRecords())
           .append("&consumersCount=").append(properties.getKafka().getConsumerCount())
           .append("&breakOnFirstError=false")
           .append("&keyDeserializer=org.apache.kafka.common.serialization.StringDeserializer")
           .append("&valueDeserializer=org.apache.kafka.common.serialization.StringDeserializer");

        return uri.toString();
    }

    /**
     * Pre-processor: captures Kafka metadata (topic/partition/offset/key),
     * generates a unique data ID, and records the processing start time as
     * message headers for downstream processors.
     */
    private class DataPreProcessor implements Processor {
        @Override
        public void process(Exchange exchange) throws Exception {
            long startTime = System.currentTimeMillis();

            // Kafka message metadata from the consumer endpoint.
            String topic = exchange.getIn().getHeader(KafkaConstants.TOPIC, String.class);
            Integer partition = exchange.getIn().getHeader(KafkaConstants.PARTITION, Integer.class);
            Long offset = exchange.getIn().getHeader(KafkaConstants.OFFSET, Long.class);
            String key = exchange.getIn().getHeader(KafkaConstants.KEY, String.class);

            // Unique ID used to correlate this message across all processors.
            String dataId = UUID.randomUUID().toString();

            exchange.getIn().setHeader("dataId", dataId);
            exchange.getIn().setHeader("sourceTopic", topic);
            exchange.getIn().setHeader("partition", partition);
            exchange.getIn().setHeader("offset", offset);
            exchange.getIn().setHeader("messageKey", key);
            exchange.getIn().setHeader("startTime", startTime);

            logger.debug("数据预处理完成: dataId={}, topic={}, partition={}, offset={}",
                        dataId, topic, partition, offset);
        }
    }

    /**
     * Transform processor: runs the Groovy transformation script on the raw
     * payload, parses the result as a JSON object, and stores it in the
     * {@code transformedData} header. On failure it marks the exchange and
     * rethrows so the route's error handler takes over.
     */
    private class DataTransformProcessor implements Processor {
        @Override
        public void process(Exchange exchange) throws Exception {
            String dataId = exchange.getIn().getHeader("dataId", String.class);
            String originalData = exchange.getIn().getBody(String.class);

            try {
                // Transform the payload with the configured Groovy script.
                String transformedData = groovyScriptEngine.transformData(originalData);

                // Parse the transformed JSON into a typed Map (no raw cast needed).
                Map<String, Object> transformedMap = objectMapper.readValue(transformedData, MAP_TYPE);

                exchange.getIn().setHeader("transformedData", transformedMap);
                exchange.getIn().setHeader("transformSuccess", true);

                logger.debug("数据转换成功: dataId={}", dataId);

            } catch (Exception e) {
                // Mark the failure on the exchange, then rethrow to trigger retries/DLQ.
                logger.error("数据转换失败: dataId={}", dataId, e);
                exchange.getIn().setHeader("transformSuccess", false);
                exchange.getIn().setHeader("transformError", e.getMessage());
                throw e;
            }
        }
    }

    /**
     * Post-processor: computes the elapsed processing time and stamps the
     * completion timestamp on the message.
     */
    private class DataPostProcessor implements Processor {
        @Override
        public void process(Exchange exchange) throws Exception {
            // The header may be absent if the pre-processor was bypassed;
            // avoid an NPE from auto-unboxing a null Long.
            Long startTime = exchange.getIn().getHeader("startTime", Long.class);
            long processingTime = startTime != null ? System.currentTimeMillis() - startTime : 0L;

            exchange.getIn().setHeader("processingTimeMs", processingTime);
            exchange.getIn().setHeader("processTime", LocalDateTime.now());

            logger.debug("数据后处理完成: dataId={}, 处理耗时={}ms",
                        exchange.getIn().getHeader("dataId"), processingTime);
        }
    }

    /**
     * Buffer processor: assembles a {@link ProcessedData} record from the
     * exchange headers/body and appends it to the data buffer.
     */
    private class DataBufferProcessor implements Processor {
        @Override
        public void process(Exchange exchange) throws Exception {
            ProcessedData processedData = new ProcessedData();

            // Basic message and Kafka metadata collected by the pre-processor.
            processedData.setId(exchange.getIn().getHeader("dataId", String.class));
            processedData.setOriginalData(exchange.getIn().getBody(String.class));
            @SuppressWarnings("unchecked")
            Map<String, Object> transformed = exchange.getIn().getHeader("transformedData", Map.class);
            processedData.setTransformedData(transformed);
            processedData.setSourceTopic(exchange.getIn().getHeader("sourceTopic", String.class));
            processedData.setPartition(exchange.getIn().getHeader("partition", Integer.class));
            processedData.setOffset(exchange.getIn().getHeader("offset", Long.class));
            processedData.setProcessTime(exchange.getIn().getHeader("processTime", LocalDateTime.class));
            processedData.setProcessingTimeMs(exchange.getIn().getHeader("processingTimeMs", Long.class));

            // Processing status as reported by the transform processor.
            Boolean transformSuccess = exchange.getIn().getHeader("transformSuccess", Boolean.class);
            if (transformSuccess != null && transformSuccess) {
                processedData.setStatus(ProcessedData.ProcessStatus.SUCCESS);
            } else {
                processedData.setStatus(ProcessedData.ProcessStatus.FAILED);
                processedData.setErrorMessage(exchange.getIn().getHeader("transformError", String.class));
            }

            dataBuffer.addData(processedData);

            logger.debug("数据已添加到缓冲区: dataId={}", processedData.getId());
        }
    }

    /**
     * Dead-letter processor: records a FAILED {@link ProcessedData} entry
     * (with the caught exception's message) into the buffer for error
     * statistics.
     */
    private class ErrorHandlerProcessor implements Processor {
        @Override
        public void process(Exchange exchange) throws Exception {
            String dataId = exchange.getIn().getHeader("dataId", String.class);
            Exception exception = exchange.getProperty(Exchange.EXCEPTION_CAUGHT, Exception.class);

            logger.error("数据处理失败: dataId={}", dataId, exception);

            // The failure may occur before a dataId was assigned — generate one.
            ProcessedData failedData = new ProcessedData();
            failedData.setId(dataId != null ? dataId : UUID.randomUUID().toString());
            failedData.setOriginalData(exchange.getIn().getBody(String.class));
            failedData.setStatus(ProcessedData.ProcessStatus.FAILED);
            failedData.setErrorMessage(exception != null ? exception.getMessage() : "未知错误");
            failedData.setProcessTime(LocalDateTime.now());

            // Buffered so failures are visible in error statistics.
            dataBuffer.addData(failedData);
        }
    }
}
