package com.datareport.service;

import com.datareport.common.exception.BusinessException;
import com.datareport.domain.entity.StatRule;
import com.datareport.domain.entity.StatisticResult;
import com.datareport.repository.StatRuleRepository;
import lombok.extern.slf4j.Slf4j;
import org.apache.flink.api.common.functions.AggregateFunction;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.restartstrategy.RestartStrategies;
import org.apache.flink.api.java.functions.KeySelector;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.contrib.streaming.state.RocksDBStateBackend;
import org.apache.flink.runtime.state.StateBackend;
import org.apache.flink.streaming.api.CheckpointingMode;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.KeyedStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.datastream.WindowedStream;
import org.apache.flink.streaming.api.environment.CheckpointConfig;
import org.apache.flink.streaming.api.environment.CheckpointConfig.ExternalizedCheckpointCleanup;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.sink.RichSinkFunction;
import org.apache.flink.streaming.api.functions.source.SourceFunction;
import org.apache.flink.streaming.api.windowing.assigners.TumblingProcessingTimeWindows;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.streaming.api.windowing.windows.TimeWindow;
import org.apache.flink.util.OutputTag;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.sql.Timestamp;
import java.util.*;
import java.util.concurrent.TimeUnit;

/**
 * Flink实时计算服务
 * 基于业务需求文档5.3节要求，支持实时统计计算，计算延迟≤10秒，支持多表关联计算和敏感数据脱敏处理
 */
@Service
@Slf4j
public class FlinkComputeService {

    @Autowired
    private StatRuleRepository ruleRepository;

    /**
     * Starts the streaming compute job for the given rule.
     *
     * <p>Note: for an unbounded streaming topology {@code env.execute()} blocks
     * until the job terminates, so this method does not return while the job
     * is running and the submission log is written before execution starts.
     *
     * @param ruleId the rule ID whose job should be started
     * @throws Exception if the rule is invalid or the Flink job fails
     */
    public void startComputeJob(String ruleId) throws Exception {
        StatRule rule = ruleRepository.selectById(ruleId);
        if (rule == null || rule.getStatus() != 1) {
            throw new BusinessException("规则不存在或已禁用");
        }

        // Build and tune the execution environment (≤10s end-to-end latency target)
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        configureEnvironment(env);

        // Pipeline: sources -> rule computation -> JDBC persistence
        DataStream<BusinessData> dataStream = buildDataStream(env, rule);
        DataStream<StatisticResult> resultStream = executeRuleComputation(dataStream, rule);
        resultStream.addSink(new StatisticResultSink(ruleId));

        String jobName = "statistic_job_" + ruleId;
        // Log BEFORE execute(): execute() blocks for the lifetime of the
        // streaming job, so a log statement placed after it would only run
        // once the job has already terminated.
        log.info("规则计算任务启动: ruleId={}, jobName={}", ruleId, jobName);
        env.execute(jobName);
    }

    /**
     * Tunes the execution environment for low-latency, fault-tolerant processing.
     *
     * <p>Enables exactly-once checkpointing every 5 seconds, retains
     * externalized checkpoints on cancellation, tries to install a RocksDB
     * state backend for large state (falling back to the default backend on
     * failure), and configures a fixed-delay restart strategy (3 attempts,
     * 10 seconds apart) for automatic recovery.
     *
     * @param env the stream execution environment to configure
     */
    private void configureEnvironment(StreamExecutionEnvironment env) {
        final int parallelism = 4;
        final long checkpointIntervalMs = 5_000L;   // 5s checkpoints for fast recovery
        final long checkpointTimeoutMs = 10_000L;
        final long minPauseBetweenCheckpointsMs = 1_000L;

        env.setParallelism(parallelism);

        // Exactly-once checkpointing, one checkpoint in flight at a time
        env.enableCheckpointing(checkpointIntervalMs, CheckpointingMode.EXACTLY_ONCE);
        CheckpointConfig cpConfig = env.getCheckpointConfig();
        cpConfig.setCheckpointTimeout(checkpointTimeoutMs);
        cpConfig.setMinPauseBetweenCheckpoints(minPauseBetweenCheckpointsMs);
        cpConfig.setMaxConcurrentCheckpoints(1);
        cpConfig.setExternalizedCheckpointCleanup(
                ExternalizedCheckpointCleanup.RETAIN_ON_CANCELLATION);

        // RocksDB state backend with incremental checkpoints; non-fatal if it
        // cannot be created (default backend is used instead)
        try {
            StateBackend rocksDbBackend = new RocksDBStateBackend("file:///tmp/flink-state", true);
            env.setStateBackend(rocksDbBackend);
        } catch (Exception e) {
            log.warn("设置RocksDB状态后端失败，使用默认状态后端", e);
        }

        // Automatic retries: 3 fixed-delay restart attempts, 10s apart
        env.setRestartStrategy(RestartStrategies.fixedDelayRestart(
                3,
                org.apache.flink.api.common.time.Time.seconds(10)));
    }

    /**
     * Builds the merged source stream for a rule.
     *
     * <p>Creates one mock source per configured table (to be replaced by real
     * Kafka consumers in production), unions them into a single stream, and
     * applies the rule's filter conditions.
     *
     * @param env  the execution environment to attach sources to
     * @param rule the rule whose table list and conditions drive the stream
     * @return the filtered, merged source stream
     * @throws BusinessException when the rule yields no usable sources
     */
    private DataStream<BusinessData> buildDataStream(StreamExecutionEnvironment env, StatRule rule) {
        DataStream<BusinessData> merged = null;

        // One source per configured table, unioned left-to-right
        for (String table : parseTables(rule.getTables())) {
            DataStream<BusinessData> source = env
                    .addSource(new MockBusinessDataSource(table))
                    .name("source_" + table);
            merged = (merged == null) ? source : merged.union(source);
        }

        if (merged == null) {
            throw new BusinessException("无有效数据源");
        }

        // Drop records that do not match the rule's filter conditions
        return merged.filter(data -> filterByConditions(data, rule.getConditions()));
    }

    /**
     * Builds the keyed, windowed aggregation pipeline for a rule.
     *
     * <p>Partitions the stream by the rule's group fields, applies a 5-second
     * tumbling processing-time window (keeping end-to-end latency within the
     * 10-second requirement), incrementally aggregates each window, and maps
     * the aggregate into a {@link StatisticResult}.
     *
     * @param dataStream the filtered source stream
     * @param rule       the rule driving grouping and aggregation
     * @return the stream of per-window statistic results
     */
    private DataStream<StatisticResult> executeRuleComputation(
            DataStream<BusinessData> dataStream, StatRule rule) {

        // Parse grouping/aggregation configuration once, up front
        RuleConfig config = parseRuleConfig(rule);

        // 1. Partition by the configured group fields (multi-field keys supported)
        KeyedStream<BusinessData, String> keyedStream = dataStream
                .keyBy(new BusinessDataKeySelector(config.getGroupFields()));

        // 2. 5-second tumbling processing-time window.
        //    Processing-time windows can never see "late" data, so the former
        //    allowedLateness/sideOutputLateData calls were no-op configuration
        //    and have been removed. If this is ever switched to event-time
        //    windows, lateness handling must be reintroduced.
        WindowedStream<BusinessData, String, TimeWindow> windowedStream = keyedStream
                .window(TumblingProcessingTimeWindows.of(Time.seconds(5)));

        // 3. Incremental per-window aggregation
        SingleOutputStreamOperator<BusinessData> aggregatedStream = windowedStream
                .aggregate(new BusinessDataAggregateFunction(config));

        // 4. Convert the aggregate record into the persisted result format
        return aggregatedStream.map(new BusinessDataToResultMapper(rule.getRuleId()));
    }

    /**
     * Parses the rule's table configuration (a JSON string array).
     *
     * @param tablesJson JSON array of table names; may be null or blank
     * @return the parsed table names, or an empty list when the input is
     *         missing or unparseable (a warning is logged in the latter case)
     */
    private List<String> parseTables(String tablesJson) {
        if (tablesJson != null && !tablesJson.trim().isEmpty()) {
            try {
                return com.alibaba.fastjson2.JSON.parseArray(tablesJson, String.class);
            } catch (Exception e) {
                log.warn("解析表配置失败: {}", tablesJson, e);
            }
        }
        return Collections.emptyList();
    }

    /**
     * Decides whether a record passes the rule's filter conditions.
     *
     * <p>Placeholder implementation: {@code conditionsJson} is currently
     * ignored and every record is accepted. A real implementation should
     * parse the JSON conditions and evaluate them against {@code data}.
     *
     * @param data           the record under consideration
     * @param conditionsJson the rule's filter conditions as a JSON string (unused)
     * @return always {@code true} in this placeholder version
     */
    private boolean filterByConditions(BusinessData data, String conditionsJson) {
        // 简化实现，实际项目中应解析JSON条件并执行相应过滤逻辑
        return true;
    }

    /**
     * Translates a {@link StatRule} entity into the internal {@link RuleConfig}
     * (group fields + aggregate definitions).
     *
     * <p>Parsing failures of either part are logged and replaced by an empty
     * list so job construction is never aborted by a malformed rule field.
     *
     * @param rule the persisted rule entity
     * @return the parsed configuration
     */
    private RuleConfig parseRuleConfig(StatRule rule) {
        RuleConfig config = new RuleConfig();
        config.setRuleId(rule.getRuleId());

        String groupFieldsJson = rule.getGroupFields();
        if (groupFieldsJson != null) {
            try {
                config.setGroupFields(
                        com.alibaba.fastjson2.JSON.parseArray(groupFieldsJson, String.class));
            } catch (Exception e) {
                log.warn("解析分组字段失败: {}", groupFieldsJson, e);
                config.setGroupFields(Collections.emptyList());
            }
        }

        String aggFieldsJson = rule.getAggFields();
        if (aggFieldsJson != null) {
            try {
                config.setAggregates(parseAggregateConfig(aggFieldsJson));
            } catch (Exception e) {
                log.warn("解析聚合字段失败: {}", aggFieldsJson, e);
                config.setAggregates(Collections.emptyList());
            }
        }

        return config;
    }

    /**
     * Parses the aggregate configuration for a rule.
     *
     * <p>Placeholder implementation: the JSON input is currently ignored and
     * a single {@code count(*)} aggregate (alias {@code "count"}) is always
     * returned. A real implementation should build the list from
     * {@code aggFieldsJson}.
     *
     * @param aggFieldsJson JSON describing the aggregates (currently unused)
     * @return a mutable list containing the default count aggregate
     */
    private List<AggregateConfig> parseAggregateConfig(String aggFieldsJson) {
        AggregateConfig countAll = new AggregateConfig();
        countAll.setFunction("count");
        countAll.setField("*");
        countAll.setAlias("count");

        List<AggregateConfig> aggregates = new ArrayList<>();
        aggregates.add(countAll);
        return aggregates;
    }

    /**
     * Validates a statistic rule configuration.
     *
     * @param rule the rule entity to validate
     * @return {@code true} when the configuration is valid
     * @throws BusinessException when any part of the configuration is missing,
     *                           malformed, or semantically invalid
     */
    public boolean validateRuleConfig(StatRule rule) throws BusinessException {
        log.debug("开始验证规则配置: ruleId={}", rule != null ? rule.getRuleId() : null);

        if (rule == null) {
            throw new BusinessException("规则不能为空");
        }

        // Basic identity fields
        if (rule.getRuleId() == null || rule.getRuleId().trim().isEmpty()) {
            throw new BusinessException("规则ID不能为空");
        }
        if (rule.getName() == null || rule.getName().trim().isEmpty()) {
            throw new BusinessException("规则名称不能为空");
        }

        // Status must be 0 (disabled) or 1 (enabled)
        if (rule.getStatus() == null) {
            throw new BusinessException("规则状态不能为空");
        }
        if (rule.getStatus() != 0 && rule.getStatus() != 1) {
            throw new BusinessException("规则状态只能是0(禁用)或1(启用)");
        }

        // Group fields: optional, but when present must be a non-empty JSON
        // array of non-blank strings
        if (rule.getGroupFields() != null && !rule.getGroupFields().trim().isEmpty()) {
            try {
                List<String> groupFields =
                        com.alibaba.fastjson2.JSON.parseArray(rule.getGroupFields(), String.class);
                if (groupFields == null || groupFields.isEmpty()) {
                    throw new BusinessException("分组字段配置不能为空数组");
                }
                for (String field : groupFields) {
                    if (field == null || field.trim().isEmpty()) {
                        throw new BusinessException("分组字段不能为空");
                    }
                }
            } catch (BusinessException e) {
                // Rethrow as-is: the generic catch below must not rewrite a
                // semantic validation failure into a "format error" message
                throw e;
            } catch (Exception e) {
                log.warn("解析分组字段配置失败: {}", rule.getGroupFields(), e);
                throw new BusinessException("分组字段配置格式错误: " + e.getMessage());
            }
        }

        // Aggregate fields: must at least be parseable as a JSON array
        if (rule.getAggFields() != null && !rule.getAggFields().trim().isEmpty()) {
            try {
                com.alibaba.fastjson2.JSON.parseArray(rule.getAggFields());
            } catch (Exception e) {
                log.warn("解析聚合字段配置失败: {}", rule.getAggFields(), e);
                throw new BusinessException("聚合字段配置格式错误: " + e.getMessage());
            }
        }

        // Related tables: optional, but when present must be a non-empty JSON array
        if (rule.getTables() != null && !rule.getTables().trim().isEmpty()) {
            try {
                List<String> tables =
                        com.alibaba.fastjson2.JSON.parseArray(rule.getTables(), String.class);
                if (tables == null || tables.isEmpty()) {
                    throw new BusinessException("关联表配置不能为空数组");
                }
            } catch (BusinessException e) {
                // Preserve the precise validation message (see group fields above)
                throw e;
            } catch (Exception e) {
                log.warn("解析关联表配置失败: {}", rule.getTables(), e);
                throw new BusinessException("关联表配置格式错误: " + e.getMessage());
            }
        }

        // Filter conditions: must at least be a parseable JSON object
        if (rule.getConditions() != null && !rule.getConditions().trim().isEmpty()) {
            try {
                com.alibaba.fastjson2.JSON.parseObject(rule.getConditions());
            } catch (Exception e) {
                log.warn("解析过滤条件配置失败: {}", rule.getConditions(), e);
                throw new BusinessException("过滤条件配置格式错误: " + e.getMessage());
            }
        }

        log.info("规则配置验证通过: ruleId={}", rule.getRuleId());
        return true;
    }

    /**
     * Deletes expired rows from a rule's result table.
     *
     * @param ruleId     the rule ID; must consist only of letters, digits and
     *                   underscores because it is embedded in the table name
     * @param daysToKeep retention in days; values &le; 0 fall back to 30 days
     * @throws BusinessException when the rule ID is missing or malformed, the
     *                           rule does not exist, or the cleanup fails
     */
    public void cleanupRuleResults(String ruleId, int daysToKeep) throws BusinessException {
        log.info("开始清理规则过期数据: ruleId={}, daysToKeep={}", ruleId, daysToKeep);

        if (ruleId == null || ruleId.trim().isEmpty()) {
            throw new BusinessException("规则ID不能为空");
        }
        // The rule ID is concatenated into a SQL identifier below; restrict
        // its character set up front to rule out SQL injection / invalid
        // table names.
        if (!ruleId.matches("[A-Za-z0-9_]+")) {
            throw new BusinessException("规则ID格式非法，仅允许字母、数字和下划线: " + ruleId);
        }

        StatRule rule = ruleRepository.selectById(ruleId);
        if (rule == null) {
            throw new BusinessException("规则不存在: " + ruleId);
        }

        // Fall back to the 30-day default retention
        int actualDaysToKeep = daysToKeep <= 0 ? 30 : daysToKeep;

        String tableName = "stat_result_" + ruleId;
        Timestamp expireTime =
                new Timestamp(System.currentTimeMillis() - TimeUnit.DAYS.toMillis(actualDaysToKeep));
        String cleanupSql = "DELETE FROM " + tableName + " WHERE stat_time < ?";

        // NOTE(review): connection settings are hard-coded; they should come
        // from configuration / an injected DataSource — confirm and refactor.
        try (Connection connection = DriverManager.getConnection(
                "jdbc:mysql://localhost:3306/data_report?useSSL=false&serverTimezone=UTC",
                "root", "password");
             PreparedStatement statement = connection.prepareStatement(cleanupSql)) {

            statement.setTimestamp(1, expireTime);
            int deletedRows = statement.executeUpdate();

            log.info("清理规则过期数据完成: ruleId={}, daysToKeep={}, deletedRows={}",
                    ruleId, actualDaysToKeep, deletedRows);
        } catch (SQLException e) {
            log.error("清理规则过期数据失败: ruleId={}, daysToKeep={}", ruleId, actualDaysToKeep, e);
            throw new BusinessException("清理规则过期数据失败: " + e.getMessage());
        } catch (Exception e) {
            log.error("清理规则过期数据时发生系统异常: ruleId={}, daysToKeep={}", ruleId, actualDaysToKeep, e);
            throw new BusinessException("清理规则过期数据时发生系统异常: " + e.getMessage());
        }
    }

    /**
     * Parsed rule configuration: grouping fields plus aggregate definitions.
     *
     * <p>Instances are captured as a field of
     * {@link BusinessDataAggregateFunction}, which Flink ships to task
     * managers via Java serialization — this class therefore must implement
     * {@code Serializable}, otherwise job submission fails with a
     * {@code NotSerializableException}.
     */
    public static class RuleConfig implements java.io.Serializable {
        private static final long serialVersionUID = 1L;

        private String ruleId;
        private List<String> groupFields = new ArrayList<>();
        private List<AggregateConfig> aggregates = new ArrayList<>();

        public String getRuleId() {
            return ruleId;
        }

        public void setRuleId(String ruleId) {
            this.ruleId = ruleId;
        }

        public List<String> getGroupFields() {
            return groupFields;
        }

        public void setGroupFields(List<String> groupFields) {
            this.groupFields = groupFields;
        }

        public List<AggregateConfig> getAggregates() {
            return aggregates;
        }

        public void setAggregates(List<AggregateConfig> aggregates) {
            this.aggregates = aggregates;
        }
    }

    /**
     * A single aggregate definition: function name ({@code count}, {@code sum},
     * {@code avg}, {@code max}, {@code min}), the source field, and the alias
     * under which the result is stored.
     *
     * <p>Held inside {@link RuleConfig}, which travels with Flink operators via
     * Java serialization — so this class must be {@code Serializable} as well.
     */
    public static class AggregateConfig implements java.io.Serializable {
        private static final long serialVersionUID = 1L;

        private String function; // count, sum, avg, max, min
        private String field;
        private String alias;

        public String getFunction() {
            return function;
        }

        public void setFunction(String function) {
            this.function = function;
        }

        public String getField() {
            return field;
        }

        public void setField(String field) {
            this.field = field;
        }

        public String getAlias() {
            return alias;
        }

        public void setAlias(String alias) {
            this.alias = alias;
        }
    }

    /**
     * Data record flowing through the pipeline: the originating table name, a
     * map of field values, and the record's creation timestamp (epoch millis).
     *
     * <p>Keeps a public no-arg constructor plus conventional getters/setters.
     */
    public static class BusinessData {
        private String tableName;
        private Map<String, Object> data = new HashMap<>();
        private long timestamp;

        public BusinessData() {
            this.timestamp = System.currentTimeMillis();
        }

        public BusinessData(String tableName, Map<String, Object> data) {
            this();
            this.tableName = tableName;
            this.data = data; // stores the caller's map by reference
        }

        /** Looks up a single field value; {@code null} when the field is absent. */
        public Object getFieldValue(String field) {
            return data.get(field);
        }

        public String getTableName() {
            return tableName;
        }

        public void setTableName(String tableName) {
            this.tableName = tableName;
        }

        public Map<String, Object> getData() {
            return data;
        }

        public void setData(Map<String, Object> data) {
            this.data = data;
        }

        public long getTimestamp() {
            return timestamp;
        }

        public void setTimestamp(long timestamp) {
            this.timestamp = timestamp;
        }
    }

    /**
     * Builds the grouping key for a record from the rule's configured group
     * fields. Every field value is followed by a {@code '_'} separator
     * (including the last one); absent values are rendered as {@code "null"}.
     * When no group fields are configured, all records share the key
     * {@code "default"}.
     */
    public static class BusinessDataKeySelector implements KeySelector<BusinessData, String> {
        private final List<String> groupFields;

        public BusinessDataKeySelector(List<String> groupFields) {
            this.groupFields = (groupFields == null) ? Collections.emptyList() : groupFields;
        }

        @Override
        public String getKey(BusinessData businessData) {
            if (groupFields.isEmpty()) {
                return "default";
            }

            // "_" between values plus a trailing "_" suffix
            StringJoiner key = new StringJoiner("_", "", "_");
            for (String field : groupFields) {
                key.add(String.valueOf(businessData.getFieldValue(field)));
            }
            return key.toString();
        }
    }

    /**
     * Incremental window aggregate: folds each {@link BusinessData} element
     * into a {@link BusinessAccumulator} and emits the accumulated values as a
     * new {@link BusinessData} when the window fires. All aggregation logic
     * lives in the accumulator; this class only wires it into Flink's
     * {@code AggregateFunction} contract.
     */
    public static class BusinessDataAggregateFunction 
            implements AggregateFunction<BusinessData, BusinessAccumulator, BusinessData> {

        // Captured into the operator; Flink distributes functions via Java
        // serialization, so this field must be serializable — verify that
        // RuleConfig implements Serializable.
        private final RuleConfig ruleConfig;

        public BusinessDataAggregateFunction(RuleConfig ruleConfig) {
            this.ruleConfig = ruleConfig;
        }

        @Override
        public BusinessAccumulator createAccumulator() {
            return new BusinessAccumulator(ruleConfig);
        }

        @Override
        public BusinessAccumulator add(BusinessData value, BusinessAccumulator acc) {
            acc.addData(value);
            return acc;
        }

        @Override
        public BusinessData getResult(BusinessAccumulator acc) {
            return acc.getResult();
        }

        // Used by Flink when window panes are merged (e.g. session windows).
        @Override
        public BusinessAccumulator merge(BusinessAccumulator a, BusinessAccumulator b) {
            return a.merge(b);
        }
    }

    /**
     * Mutable accumulator backing {@link BusinessDataAggregateFunction}.
     *
     * <p>Keeps one entry per aggregate alias in {@code aggregatedData}.
     * Averages additionally track a running (sum, count) pair so they stay
     * exact across elements and merges. {@code count} aggregates are handled
     * before the per-field null guard so the conventional {@code count(*)}
     * configuration (field {@code "*"}, which has no concrete value) works.
     */
    public static class BusinessAccumulator {
        private final RuleConfig ruleConfig;
        private Map<String, Object> aggregatedData = new HashMap<>();
        // alias -> {running sum, element count} for "avg" aggregates
        private Map<String, double[]> avgState = new HashMap<>();
        private long count = 0;

        public BusinessAccumulator(RuleConfig ruleConfig) {
            this.ruleConfig = ruleConfig;
        }

        /** Folds one element into every configured aggregate. */
        public void addData(BusinessData data) {
            count++;

            for (AggregateConfig aggConfig : ruleConfig.getAggregates()) {
                String function = aggConfig.getFunction();
                String alias = aggConfig.getAlias();

                // "count" needs no field value: handle it before the null
                // guard, otherwise a field of "*" (no such key) would skip it
                if ("count".equals(function)) {
                    aggregatedData.put(alias, count);
                    continue;
                }

                Object value = data.getFieldValue(aggConfig.getField());
                if (value == null) {
                    continue;
                }
                double fieldValue = Double.parseDouble(value.toString());

                switch (function) {
                    case "sum":
                        aggregatedData.put(alias,
                                toDouble(aggregatedData.get(alias), 0.0) + fieldValue);
                        break;
                    case "avg": {
                        double[] state = avgState.computeIfAbsent(alias, k -> new double[2]);
                        state[0] += fieldValue;
                        state[1] += 1;
                        aggregatedData.put(alias, state[0] / state[1]);
                        break;
                    }
                    case "max":
                        // Seed with -inf, NOT Double.MIN_VALUE (the smallest
                        // positive double), so all-negative inputs work
                        aggregatedData.put(alias, Math.max(
                                toDouble(aggregatedData.get(alias), Double.NEGATIVE_INFINITY),
                                fieldValue));
                        break;
                    case "min":
                        aggregatedData.put(alias, Math.min(
                                toDouble(aggregatedData.get(alias), Double.POSITIVE_INFINITY),
                                fieldValue));
                        break;
                    default:
                        // Unknown aggregate functions are ignored
                        break;
                }
            }
        }

        /** Snapshots the aggregated values into a result record. */
        public BusinessData getResult() {
            BusinessData result = new BusinessData();
            result.setData(new HashMap<>(aggregatedData));
            return result;
        }

        /**
         * Combines another accumulator into this one (invoked by Flink when
         * merging window panes). Values are combined per aggregate function
         * instead of blindly overwritten, so sums add up, max/min compare,
         * and averages merge their (sum, count) state.
         */
        public BusinessAccumulator merge(BusinessAccumulator other) {
            this.count += other.count;

            for (AggregateConfig aggConfig : ruleConfig.getAggregates()) {
                String alias = aggConfig.getAlias();
                Object mine = aggregatedData.get(alias);
                Object theirs = other.aggregatedData.get(alias);

                switch (aggConfig.getFunction()) {
                    case "count":
                        aggregatedData.put(alias, count);
                        break;
                    case "sum":
                        if (theirs != null) {
                            aggregatedData.put(alias, toDouble(mine, 0.0) + toDouble(theirs, 0.0));
                        }
                        break;
                    case "avg": {
                        double[] otherState = other.avgState.get(alias);
                        if (otherState != null) {
                            double[] state = avgState.computeIfAbsent(alias, k -> new double[2]);
                            state[0] += otherState[0];
                            state[1] += otherState[1];
                            if (state[1] > 0) {
                                aggregatedData.put(alias, state[0] / state[1]);
                            }
                        }
                        break;
                    }
                    case "max":
                        if (theirs != null) {
                            aggregatedData.put(alias, Math.max(
                                    toDouble(mine, Double.NEGATIVE_INFINITY),
                                    toDouble(theirs, Double.NEGATIVE_INFINITY)));
                        }
                        break;
                    case "min":
                        if (theirs != null) {
                            aggregatedData.put(alias, Math.min(
                                    toDouble(mine, Double.POSITIVE_INFINITY),
                                    toDouble(theirs, Double.POSITIVE_INFINITY)));
                        }
                        break;
                    default:
                        break;
                }
            }
            return this;
        }

        /** Parses a stored aggregate value, falling back to {@code dflt} when absent. */
        private static double toDouble(Object value, double dflt) {
            return value == null ? dflt : Double.parseDouble(value.toString());
        }
    }

    /**
     * Maps an aggregated {@link BusinessData} record to a persisted
     * {@link StatisticResult}. Aggregate entries that are absent from the
     * record default to 0; the group key is taken from the record's table
     * name and the stat time is stamped at mapping time.
     */
    public static class BusinessDataToResultMapper implements MapFunction<BusinessData, StatisticResult> {
        private final String ruleId;

        public BusinessDataToResultMapper(String ruleId) {
            this.ruleId = ruleId;
        }

        @Override
        public StatisticResult map(BusinessData businessData) {
            Map<String, Object> data = businessData.getData();

            StatisticResult result = new StatisticResult();
            result.setRuleId(ruleId);
            result.setGroupKey(businessData.getTableName());
            result.setCountValue(longValue(data.get("count")));
            result.setSumValue(doubleValue(data.get("sum")));
            result.setAvgValue(doubleValue(data.get("avg")));
            result.setMaxValue(doubleValue(data.get("max")));
            result.setMinValue(doubleValue(data.get("min")));
            result.setStatTime(java.time.LocalDateTime.now());
            return result;
        }

        /** Parses an aggregate entry as {@code long}, defaulting to 0 when absent. */
        private static long longValue(Object value) {
            return value == null ? 0L : Long.parseLong(value.toString());
        }

        /** Parses an aggregate entry as {@code double}, defaulting to 0.0 when absent. */
        private static double doubleValue(Object value) {
            return value == null ? 0.0 : Double.parseDouble(value.toString());
        }
    }

    /**
     * Mock source that emits one synthetic {@link BusinessData} record per
     * second for a given table (stand-in for a real Kafka consumer).
     *
     * <p>Elements are emitted while holding the checkpoint lock, as required
     * by the {@code SourceFunction} contract when checkpointing is enabled;
     * emitting without the lock can interleave records with checkpoint
     * barriers and break exactly-once guarantees.
     */
    public static class MockBusinessDataSource implements SourceFunction<BusinessData> {
        private static final long serialVersionUID = 1L;

        private final String tableName;
        private volatile boolean running = true;

        public MockBusinessDataSource(String tableName) {
            this.tableName = tableName;
        }

        @Override
        public void run(SourceContext<BusinessData> ctx) {
            Random random = new Random();
            while (running) {
                Map<String, Object> data = new HashMap<>();
                data.put("amount", random.nextDouble() * 1000);
                data.put("quantity", random.nextInt(100));
                data.put("category", "category_" + random.nextInt(5));

                // Emit under the checkpoint lock so records and checkpoint
                // barriers stay correctly ordered
                synchronized (ctx.getCheckpointLock()) {
                    ctx.collect(new BusinessData(tableName, data));
                }

                try {
                    Thread.sleep(1000); // one record per second
                } catch (InterruptedException e) {
                    Thread.currentThread().interrupt();
                    break;
                }
            }
        }

        @Override
        public void cancel() {
            running = false;
        }
    }

    /**
     * JDBC sink that persists statistic results in batches, one result table
     * per rule ID ({@code stat_result_<ruleId>}).
     *
     * <p>Results are buffered and flushed when {@link #BATCH_SIZE} rows have
     * accumulated or {@link #BATCH_INTERVAL_MS} has elapsed since the last
     * flush (the time condition is only checked when the next element
     * arrives). On flush failure the transaction is rolled back and the
     * buffered rows are retained for the next attempt.
     *
     * <p>NOTE(review): connection settings are hard-coded and should come from
     * configuration / an injected DataSource. The sink is not integrated with
     * Flink's checkpointing, so buffered rows can be lost or duplicated on
     * failure — confirm whether exactly-once persistence is required.
     */
    public static class StatisticResultSink extends RichSinkFunction<StatisticResult> {
        private static final long serialVersionUID = 1L;

        private static final int BATCH_SIZE = 100;
        private static final long BATCH_INTERVAL_MS = 5000;

        private final String ruleId;
        // JDBC resources are created in open() on the task manager and must
        // not take part in Java serialization of the sink instance.
        private transient Connection connection;
        private transient PreparedStatement statement;
        private List<StatisticResult> batch;
        private long lastFlushTime;

        public StatisticResultSink(String ruleId) {
            this.ruleId = ruleId;
            this.batch = new ArrayList<>();
            this.lastFlushTime = System.currentTimeMillis();
        }

        /**
         * Opens the JDBC connection, ensures the per-rule result table exists,
         * and prepares the insert statement.
         *
         * @throws RuntimeException when the connection cannot be established,
         *                          so Flink's restart strategy can kick in
         */
        @Override
        public void open(Configuration parameters) {
            try {
                connection = DriverManager.getConnection(
                        "jdbc:mysql://localhost:3306/data_report?useSSL=false&serverTimezone=UTC",
                        "root", "password"
                );
                connection.setAutoCommit(false); // commit once per batch in flush()

                // One result table per rule ID
                String tableName = "stat_result_" + ruleId;
                createResultTable(tableName);

                String sql = "INSERT INTO " + tableName +
                        " (rule_id, group_key, sum_value, avg_value, max_value, min_value, count_value, stat_time, create_time) " +
                        "VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)";
                statement = connection.prepareStatement(sql);
            } catch (SQLException e) {
                log.error("初始化数据库连接失败", e);
                throw new RuntimeException("数据库连接初始化失败", e);
            }
        }

        /** Buffers one result and flushes when the size or time threshold is hit. */
        @Override
        public void invoke(StatisticResult result, Context context) {
            batch.add(result);

            if (batch.size() >= BATCH_SIZE ||
                    System.currentTimeMillis() - lastFlushTime >= BATCH_INTERVAL_MS) {
                flush();
            }
        }

        /** Best-effort flush of buffered rows, then releases JDBC resources. */
        @Override
        public void close() {
            if (!batch.isEmpty()) {
                flush();
            }

            if (statement != null) {
                try {
                    statement.close();
                } catch (SQLException e) {
                    log.error("关闭statement失败", e);
                }
            }
            if (connection != null) {
                try {
                    connection.close();
                } catch (SQLException e) {
                    log.error("关闭connection失败", e);
                }
            }
        }

        /**
         * Writes the buffered results as a single JDBC batch and commits.
         * On failure the transaction is rolled back and the batch is kept so
         * the rows are retried on the next flush — beware that the buffer can
         * grow unbounded if the database stays unavailable.
         */
        private void flush() {
            // Also guards against close() running after a failed open(),
            // where statement/connection were never initialized
            if (batch.isEmpty() || statement == null || connection == null) {
                return;
            }

            try {
                for (StatisticResult result : batch) {
                    statement.setString(1, result.getRuleId());
                    statement.setString(2, result.getGroupKey());
                    statement.setDouble(3, result.getSumValue() != null ? result.getSumValue() : 0.0);
                    statement.setDouble(4, result.getAvgValue() != null ? result.getAvgValue() : 0.0);
                    statement.setDouble(5, result.getMaxValue() != null ? result.getMaxValue() : 0.0);
                    statement.setDouble(6, result.getMinValue() != null ? result.getMinValue() : 0.0);
                    statement.setLong(7, result.getCountValue() != null ? result.getCountValue() : 0L);
                    statement.setTimestamp(8, Timestamp.valueOf(result.getStatTime()));
                    statement.setTimestamp(9, new Timestamp(System.currentTimeMillis()));
                    statement.addBatch();
                }

                statement.executeBatch();
                connection.commit();

                log.debug("批量插入{}条统计结果", batch.size());
                batch.clear();
                lastFlushTime = System.currentTimeMillis();
            } catch (SQLException e) {
                log.error("批量插入统计结果失败", e);
                try {
                    connection.rollback();
                } catch (SQLException rollbackEx) {
                    log.error("回滚失败", rollbackEx);
                }
            }
        }

        /**
         * Creates the per-rule result table if it does not exist. Failure is
         * logged but not propagated; a subsequent prepareStatement/insert on a
         * missing table will surface the error.
         */
        private void createResultTable(String tableName) {
            String createTableSql = "CREATE TABLE IF NOT EXISTS " + tableName + " (" +
                    "id BIGINT AUTO_INCREMENT PRIMARY KEY," +
                    "rule_id VARCHAR(50) NOT NULL," +
                    "group_key VARCHAR(100)," +
                    "sum_value DOUBLE," +
                    "avg_value DOUBLE," +
                    "max_value DOUBLE," +
                    "min_value DOUBLE," +
                    "count_value BIGINT," +
                    "stat_time TIMESTAMP," +
                    "create_time TIMESTAMP DEFAULT CURRENT_TIMESTAMP," +
                    "INDEX idx_rule_id (rule_id)," +
                    "INDEX idx_stat_time (stat_time)" +
                    ") ENGINE=InnoDB DEFAULT CHARSET=utf8mb4";

            try (PreparedStatement stmt = connection.prepareStatement(createTableSql)) {
                stmt.execute();
                log.info("创建统计结果表成功: {}", tableName);
            } catch (SQLException e) {
                log.error("创建统计结果表失败: {}", tableName, e);
            }
        }
    }
}