package com.datagateway.component;

import com.datagateway.model.ProcessedData;
import com.datagateway.component.DataCompressionManager;
import com.datagateway.component.DataSerializationManager;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.scheduling.annotation.Async;
import org.springframework.stereotype.Component;

import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicLong;

/**
 * Data buffer component.
 *
 * <p>Accumulates processed records and flushes them to Hive in batches — either
 * when the batch reaches {@link #batchSize} records or when it has been open for
 * {@link #BATCH_TIMEOUT_MS} — to avoid the Hive small-file problem that
 * per-record writes would cause.
 *
 * <p>Thread-safety: {@code currentBatch} is guarded by its own monitor; counters
 * are atomics; {@code batchSize} and {@code batchStartTime} are volatile.
 *
 * @author Data Gateway Team
 * @version 1.0.0
 */
@Component
public class DataBuffer {

    private static final Logger logger = LoggerFactory.getLogger(DataBuffer.class);

    /** Default records-per-batch; adjustable at runtime via {@link #updateBatchSize(int)}. */
    private static final int DEFAULT_BATCH_SIZE = 1000;

    /** Maximum age of an open batch before it is flushed regardless of size. */
    private static final long BATCH_TIMEOUT_MS = 30_000L;

    /** How long enqueue/dequeue operations wait on {@link #dataQueue}. */
    private static final long QUEUE_WAIT_MS = 100L;

    @Autowired
    @Qualifier("hiveWriteExecutor")
    private java.util.concurrent.Executor hiveWriteExecutor;

    @Autowired
    private HiveWriter hiveWriter;

    @Autowired
    private RetryQueue retryQueue;

    @Autowired
    private AlertManager alertManager;

    @Autowired
    private SystemMonitor systemMonitor;

    @Autowired
    private DataQualityChecker dataQualityChecker;

    @Autowired
    private DataCompressionManager compressionManager;

    @Autowired
    private DataSerializationManager serializationManager;

    /**
     * Incoming data queue (unbounded).
     */
    private final BlockingQueue<ProcessedData> dataQueue = new LinkedBlockingQueue<>();

    /**
     * Records accumulated for the current batch. All access is synchronized on
     * this list's monitor.
     */
    private final List<ProcessedData> currentBatch = new ArrayList<>();

    /**
     * Epoch millis at which the current batch was opened.
     */
    private volatile long batchStartTime = System.currentTimeMillis();

    /**
     * Total number of records handed off for writing.
     */
    private final AtomicLong totalProcessedCount = new AtomicLong(0);

    /**
     * Number of batches triggered so far.
     */
    private final AtomicLong batchCounter = new AtomicLong(0);

    /**
     * Records per batch. Volatile so updates from {@link #updateBatchSize(int)}
     * are visible to processing threads. (This field was previously missing:
     * the setter assigned {@code this.batchSize}, which did not compile, and the
     * trigger check used a hard-coded 1000.)
     */
    private volatile int batchSize = DEFAULT_BATCH_SIZE;

    /**
     * Whether data compression is enabled.
     */
    private boolean compressionEnabled = true;

    /**
     * Whether data serialization optimization is enabled.
     */
    private boolean serializationEnabled = true;

    /**
     * Updates the batch size used by {@link #shouldTriggerBatch()}.
     *
     * @param batchSize new batch size; must be positive
     * @throws IllegalArgumentException if {@code batchSize} is not positive
     */
    public void updateBatchSize(int batchSize) {
        if (batchSize <= 0) {
            throw new IllegalArgumentException("batchSize must be positive: " + batchSize);
        }
        this.batchSize = batchSize;
        logger.info("数据缓冲区批处理大小已更新: {}", batchSize);
    }

    /**
     * Enables or disables data compression.
     *
     * @param compressionEnabled whether compression is enabled
     */
    public void setCompressionEnabled(boolean compressionEnabled) {
        this.compressionEnabled = compressionEnabled;
        logger.info("数据压缩状态已更新: {}", compressionEnabled);
    }

    /**
     * Enables or disables serialization optimization.
     *
     * @param serializationEnabled whether serialization optimization is enabled
     */
    public void setSerializationEnabled(boolean serializationEnabled) {
        this.serializationEnabled = serializationEnabled;
        logger.info("数据序列化优化状态已更新: {}", serializationEnabled);
    }

    /**
     * Adds a processed record to the buffer after a data-quality check.
     *
     * <p>Records that FAIL the quality check are marked failed and discarded;
     * WARN results are logged but still buffered.
     *
     * @param data the processed record to buffer
     */
    public void addData(ProcessedData data) {
        try {
            // Run the quality check first so bad records never enter the buffer.
            DataQualityChecker.QualityCheckResult qualityResult = dataQualityChecker.checkDataQuality(data);

            if (qualityResult.getResult() == DataQualityChecker.QualityResult.FAIL) {
                logger.warn("数据质量检查失败，跳过处理: dataId={}, issues={}",
                           data.getId(), qualityResult.getIssues().size());
                data.setStatus(ProcessedData.ProcessStatus.FAILED);
                // Guard against a FAIL result carrying an empty issue list
                // (previously an unchecked get(0) could throw IndexOutOfBoundsException).
                String detail = qualityResult.getIssues().isEmpty()
                        ? "unknown"
                        : qualityResult.getIssues().get(0).getMessage();
                data.setErrorMessage("数据质量检查失败: " + detail);
                return;
            } else if (qualityResult.getResult() == DataQualityChecker.QualityResult.WARN) {
                logger.warn("数据质量检查警告: dataId={}, issues={}",
                           data.getId(), qualityResult.getIssues().size());
            }

            // Check the offer result: on timeout the record would otherwise be
            // dropped silently. (The queue is unbounded, so this should not occur
            // in practice, but a dropped record must never go unlogged.)
            if (!dataQueue.offer(data, QUEUE_WAIT_MS, TimeUnit.MILLISECONDS)) {
                logger.warn("数据入队超时，数据被丢弃: dataId={}", data.getId());
                return;
            }

            // NOTE(review): this is a self-invocation, so Spring's @Async proxy is
            // bypassed and processDataAsync() runs synchronously on the caller
            // thread — confirm whether asynchronous dispatch was intended here.
            processDataAsync();

        } catch (InterruptedException e) {
            logger.error("添加数据到缓冲区时被中断", e);
            Thread.currentThread().interrupt();
        }
    }

    /**
     * Drains one record from the queue into the current batch and triggers a
     * batch flush when the size or time threshold is reached.
     */
    @Async("dataProcessingExecutor")
    public void processDataAsync() {
        try {
            ProcessedData data = dataQueue.poll(QUEUE_WAIT_MS, TimeUnit.MILLISECONDS);
            if (data != null) {
                synchronized (currentBatch) {
                    currentBatch.add(data);

                    // Both the check and the trigger run under the batch lock so
                    // the snapshot-and-clear in triggerBatchProcess is atomic.
                    if (shouldTriggerBatch()) {
                        triggerBatchProcess();
                    }
                }
            }
        } catch (InterruptedException e) {
            logger.error("异步处理数据时被中断", e);
            Thread.currentThread().interrupt();
        }
    }

    /**
     * Decides whether the current batch should be flushed.
     * Must be called while holding the {@code currentBatch} monitor.
     *
     * @return {@code true} if the batch reached the configured size or aged past
     *         the time window
     */
    private boolean shouldTriggerBatch() {
        // Size threshold (configurable via updateBatchSize, default 1000).
        if (currentBatch.size() >= batchSize) {
            logger.info("达到批次大小限制，触发批次处理。当前批次大小: {}", currentBatch.size());
            return true;
        }

        // Time-window threshold: flush even a small batch after 30 seconds.
        long currentTime = System.currentTimeMillis();
        if (currentTime - batchStartTime >= BATCH_TIMEOUT_MS) {
            logger.info("达到时间窗口限制，触发批次处理。批次时间: {}ms", currentTime - batchStartTime);
            return true;
        }

        return false;
    }

    /**
     * Snapshots and clears the current batch, then hands it to the Hive write
     * executor. Must be called while holding the {@code currentBatch} monitor.
     */
    private void triggerBatchProcess() {
        if (currentBatch.isEmpty()) {
            return;
        }

        // Copy the batch so the write can proceed after the caller's lock is released.
        List<ProcessedData> batchToProcess = new ArrayList<>(currentBatch);
        currentBatch.clear();

        // Start the clock for the next batch.
        batchStartTime = System.currentTimeMillis();

        // Calling writeToHiveAsync() directly would be a self-invocation: Spring's
        // @Async proxy is bypassed and the write would run synchronously under the
        // batch lock. Dispatch through the injected executor instead (which was
        // previously wired but unused).
        hiveWriteExecutor.execute(() -> writeToHiveAsync(batchToProcess));

        totalProcessedCount.addAndGet(batchToProcess.size());
        batchCounter.incrementAndGet();

        logger.info("触发批次处理，批次大小: {}, 总处理数量: {}, 批次号: {}",
                   batchToProcess.size(), totalProcessedCount.get(), batchCounter.get());
    }

    /**
     * Writes one batch to Hive. On failure, sends an alert and requeues each
     * successfully-processed record onto the retry queue.
     *
     * @param batchData the batch of records to write
     */
    @Async("hiveWriteExecutor")
    public void writeToHiveAsync(List<ProcessedData> batchData) {
        try {
            hiveWriter.writeBatch(batchData);

            logger.info("成功写入Hive，批次大小: {}", batchData.size());

            systemMonitor.recordProcessedMessage(true);

        } catch (Exception e) {
            logger.error("写入Hive失败，批次大小: {}", batchData.size(), e);

            alertManager.sendHiveWriterAlert(e.getMessage(), batchData.size());

            // Only records that were processed successfully are worth retrying;
            // failed ones were already rejected upstream.
            for (ProcessedData data : batchData) {
                if (data.getStatus() == ProcessedData.ProcessStatus.SUCCESS) {
                    retryQueue.addToRetryQueue(data, "Hive写入失败: " + e.getMessage());
                }
            }

            systemMonitor.recordProcessedMessage(false);
        }
    }

    /**
     * Forces a flush of the buffer, writing any accumulated records to Hive.
     */
    public void flush() {
        synchronized (currentBatch) {
            if (!currentBatch.isEmpty()) {
                triggerBatchProcess();
            }
        }
    }

    /**
     * Returns a point-in-time snapshot of the buffer's state.
     *
     * @return the current buffer status
     */
    public BufferStatus getStatus() {
        // Take the batch lock for the size read so it is not racy with respect
        // to the writers that mutate currentBatch under the same monitor.
        int batchSizeSnapshot;
        synchronized (currentBatch) {
            batchSizeSnapshot = currentBatch.size();
        }
        return new BufferStatus(
            dataQueue.size(),
            batchSizeSnapshot,
            totalProcessedCount.get(),
            batchCounter.get(),
            System.currentTimeMillis() - batchStartTime
        );
    }

    /**
     * Immutable snapshot of buffer state.
     */
    public static class BufferStatus {
        private final int queueSize;
        private final int currentBatchSize;
        private final long totalProcessedCount;
        private final long batchCount;
        private final long currentBatchTime;

        public BufferStatus(int queueSize, int currentBatchSize, long totalProcessedCount,
                          long batchCount, long currentBatchTime) {
            this.queueSize = queueSize;
            this.currentBatchSize = currentBatchSize;
            this.totalProcessedCount = totalProcessedCount;
            this.batchCount = batchCount;
            this.currentBatchTime = currentBatchTime;
        }

        // Getters
        public int getQueueSize() { return queueSize; }
        public int getCurrentBatchSize() { return currentBatchSize; }
        public long getTotalProcessedCount() { return totalProcessedCount; }
        public long getBatchCount() { return batchCount; }
        public long getCurrentBatchTime() { return currentBatchTime; }

        @Override
        public String toString() {
            return String.format("BufferStatus{queueSize=%d, currentBatchSize=%d, " +
                               "totalProcessedCount=%d, batchCount=%d, currentBatchTime=%dms}",
                               queueSize, currentBatchSize, totalProcessedCount, batchCount, currentBatchTime);
        }
    }
}
