package com.electric.flinkcep;

import com.electric.flinkcep.config.ConfigManager;
import com.electric.flinkcep.function.DataMatchFunction;
import com.electric.flinkcep.model.LoadPredictionEvent;
import com.electric.flinkcep.model.TrueData;
import com.electric.flinkcep.model.PredictData;
import com.electric.flinkcep.serialization.TrueDataDeserializer;
import com.electric.flinkcep.serialization.PredictDataDeserializer;
import com.electric.flinkcep.service.LoadPredictionCEPService;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.restartstrategy.RestartStrategies;
import org.apache.flink.api.java.utils.ParameterTool;
import org.apache.flink.cep.CEP;
import org.apache.flink.cep.PatternStream;
import org.apache.flink.cep.pattern.Pattern;
import org.apache.flink.connector.kafka.source.KafkaSource;
import org.apache.flink.connector.kafka.source.enumerator.initializer.OffsetsInitializer;
import org.apache.flink.streaming.api.CheckpointingMode;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.sink.PrintSinkFunction;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.configuration.RestOptions;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.time.Duration;

/**
 * Main entry point of the Flink CEP application.
 *
 * <p>Consumes ground-truth load data and predicted load data from Kafka, pairs the
 * two streams, applies a CEP pattern that raises alerts on repeated large prediction
 * errors, and computes MAE/RMSE quality metrics. Alerts and metrics are written to
 * stdout via Flink's print sink.
 */
public class FlinkCEPApplication {

    private static final Logger logger = LoggerFactory.getLogger(FlinkCEPApplication.class);

    /** Port of the embedded Flink Web UI exposed by the local environment. */
    private static final int WEB_UI_PORT = 8082;

    /** Kafka topics carrying the two input streams. */
    private static final String TRUE_DATA_TOPIC = "true_data";
    private static final String PREDICT_DATA_TOPIC = "predict_data";

    /** Maximum event-time out-of-orderness tolerated by the watermark strategy. */
    private static final Duration MAX_OUT_OF_ORDERNESS = Duration.ofSeconds(5);

    /** Fixed-delay restart policy: number of attempts and pause between attempts. */
    private static final int RESTART_ATTEMPTS = 3;
    private static final long RESTART_DELAY_SECONDS = 10;

    /**
     * Builds the streaming pipeline and blocks while the job runs.
     *
     * @param args command-line arguments (currently unused)
     * @throws Exception if the environment cannot be created or job execution fails
     */
    public static void main(String[] args) throws Exception {
        logger.info("启动 Flink CEP 应用程序...");

        // Singleton holding Kafka / Flink settings.
        ConfigManager config = ConfigManager.getInstance();

        StreamExecutionEnvironment env = createExecutionEnvironment(config);

        // Two independent Kafka sources: ground-truth load and model predictions.
        DataStream<TrueData> trueDataStream = createTrueDataKafkaSource(env, config);
        DataStream<PredictData> predictDataStream = createPredictDataKafkaSource(env, config);

        // Pair each true value with its prediction before pattern matching.
        DataStream<LoadPredictionEvent> matchedEventStream = matchData(trueDataStream, predictDataStream);

        // Attach alerting pattern and metric computation.
        applyLoadPredictionCEP(matchedEventStream);

        logger.info("开始执行 Flink CEP 作业...");
        env.execute("Flink CEP Application");
    }

    /**
     * Creates a local execution environment with the Web UI enabled, checkpointing,
     * and a fixed-delay restart strategy.
     *
     * @param config source of parallelism / checkpoint settings
     * @return the fully configured environment
     */
    private static StreamExecutionEnvironment createExecutionEnvironment(ConfigManager config) {
        Configuration conf = new Configuration();
        conf.setInteger(RestOptions.PORT, WEB_UI_PORT);

        // Local environment with Web UI; a cluster deployment would normally use
        // StreamExecutionEnvironment.getExecutionEnvironment() instead.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.createLocalEnvironmentWithWebUI(conf);

        env.setParallelism(config.getFlinkParallelism());
        env.enableCheckpointing(config.getFlinkCheckpointInterval());

        // Only EXACTLY_ONCE is recognized explicitly; any other configured value
        // falls back to the cheaper AT_LEAST_ONCE guarantee.
        if ("EXACTLY_ONCE".equals(config.getFlinkCheckpointMode())) {
            env.getCheckpointConfig().setCheckpointingMode(CheckpointingMode.EXACTLY_ONCE);
        } else {
            env.getCheckpointConfig().setCheckpointingMode(CheckpointingMode.AT_LEAST_ONCE);
        }

        env.setRestartStrategy(RestartStrategies.fixedDelayRestart(
                RESTART_ATTEMPTS,
                org.apache.flink.api.common.time.Time.of(RESTART_DELAY_SECONDS, java.util.concurrent.TimeUnit.SECONDS)
        ));

        logger.info("执行环境配置完成 - 并行度: {}, 检查点间隔: {}ms, 检查点模式: {}, Web UI: http://localhost:8082", 
                   config.getFlinkParallelism(), 
                   config.getFlinkCheckpointInterval(), 
                   config.getFlinkCheckpointMode());

        return env;
    }

    /**
     * Creates the Kafka source for ground-truth load data.
     *
     * @param env    execution environment the source is attached to
     * @param config supplies bootstrap servers, group id, and offset-reset policy
     * @return stream of valid {@link TrueData} records with event-time watermarks
     */
    private static DataStream<TrueData> createTrueDataKafkaSource(StreamExecutionEnvironment env, ConfigManager config) {
        logger.info("创建真实数据Kafka数据源 - 服务器: {}, 主题: true_data, 消费组: {}", 
                   config.getKafkaBootstrapServers(), 
                   config.getKafkaGroupId());

        // Distinct consumer-group suffix so the two sources track offsets independently.
        KafkaSource<TrueData> kafkaSource = KafkaSource.<TrueData>builder()
                .setBootstrapServers(config.getKafkaBootstrapServers())
                .setTopics(TRUE_DATA_TOPIC)
                .setGroupId(config.getKafkaGroupId() + "_true")
                .setStartingOffsets(getOffsetsInitializer(config.getKafkaAutoOffsetReset()))
                .setValueOnlyDeserializer(new TrueDataDeserializer())
                .build();

        // Event time comes from the record itself; allow bounded disorder.
        DataStream<TrueData> trueDataStream = env.fromSource(
                kafkaSource,
                WatermarkStrategy.<TrueData>forBoundedOutOfOrderness(MAX_OUT_OF_ORDERNESS)
                        .withTimestampAssigner((trueData, timestamp) -> trueData.getTimestampMillis()),
                "True Data Kafka Source"
        );

        // Drop malformed records; per-record tracing at DEBUG to avoid INFO-level noise.
        trueDataStream = trueDataStream.filter(data -> data != null && data.getTimestamp() != null)
                .map(data -> {
                    logger.debug("接收到真实数据: timestamp={}, load={}, sequenceId={}", 
                               data.getTimestamp(), data.getLoad(), data.getSequenceId());
                    return data;
                });

        logger.info("真实数据Kafka数据源创建完成");
        return trueDataStream;
    }

    /**
     * Creates the Kafka source for predicted load data.
     *
     * @param env    execution environment the source is attached to
     * @param config supplies bootstrap servers, group id, and offset-reset policy
     * @return stream of valid {@link PredictData} records with event-time watermarks
     */
    private static DataStream<PredictData> createPredictDataKafkaSource(StreamExecutionEnvironment env, ConfigManager config) {
        logger.info("创建预测数据Kafka数据源 - 服务器: {}, 主题: predict_data, 消费组: {}", 
                   config.getKafkaBootstrapServers(), 
                   config.getKafkaGroupId());

        // Distinct consumer-group suffix so the two sources track offsets independently.
        KafkaSource<PredictData> kafkaSource = KafkaSource.<PredictData>builder()
                .setBootstrapServers(config.getKafkaBootstrapServers())
                .setTopics(PREDICT_DATA_TOPIC)
                .setGroupId(config.getKafkaGroupId() + "_predict")
                .setStartingOffsets(getOffsetsInitializer(config.getKafkaAutoOffsetReset()))
                .setValueOnlyDeserializer(new PredictDataDeserializer())
                .build();

        // Event time comes from the record itself; allow bounded disorder.
        DataStream<PredictData> predictDataStream = env.fromSource(
                kafkaSource,
                WatermarkStrategy.<PredictData>forBoundedOutOfOrderness(MAX_OUT_OF_ORDERNESS)
                        .withTimestampAssigner((predictData, timestamp) -> predictData.getTimestampMillis()),
                "Predict Data Kafka Source"
        );

        // Drop malformed records; per-record tracing at DEBUG to avoid INFO-level noise.
        predictDataStream = predictDataStream.filter(data -> data != null && data.getTimestamp() != null)
                .map(data -> {
                    logger.debug("接收到预测数据: timestamp={}, predictedLoad={}, sequenceId={}", 
                               data.getTimestamp(), data.getPredictedLoad(), data.getSequenceId());
                    return data;
                });

        logger.info("预测数据Kafka数据源创建完成");
        return predictDataStream;
    }

    /**
     * Maps the configured {@code auto.offset.reset} string to a Kafka offsets
     * initializer. {@code null} or unrecognized values fall back to {@code latest}.
     *
     * @param autoOffsetReset configured policy, may be {@code null}
     * @return the matching initializer, never {@code null}
     */
    private static OffsetsInitializer getOffsetsInitializer(String autoOffsetReset) {
        if (autoOffsetReset == null) {
            // Guard: toLowerCase() on a null config value would throw NPE.
            logger.warn("未知的offset重置策略: {}, 使用默认值 latest", autoOffsetReset);
            return OffsetsInitializer.latest();
        }
        // Locale.ROOT keeps the comparison independent of the JVM default locale.
        switch (autoOffsetReset.toLowerCase(java.util.Locale.ROOT)) {
            case "earliest":
                return OffsetsInitializer.earliest();
            case "latest":
                return OffsetsInitializer.latest();
            default:
                logger.warn("未知的offset重置策略: {}, 使用默认值 latest", autoOffsetReset);
                return OffsetsInitializer.latest();
        }
    }

    /**
     * Joins the two streams through a {@link DataMatchFunction}.
     *
     * <p>Both sides are keyed by the constant {@code "load"}, so every record is
     * routed to the same keyed state — matching is effectively serialized through a
     * single parallel instance regardless of job parallelism.
     *
     * @param trueDataStream    ground-truth load records
     * @param predictDataStream predicted load records
     * @return matched true/predicted event pairs
     */
    private static DataStream<LoadPredictionEvent> matchData(DataStream<TrueData> trueDataStream, 
                                                            DataStream<PredictData> predictDataStream) {
        logger.info("开始匹配真实数据和预测数据...");

        DataStream<LoadPredictionEvent> matchedStream = trueDataStream
                .connect(predictDataStream)
                .keyBy(data -> "load", data -> "load")
                .process(new DataMatchFunction());

        logger.info("数据匹配配置完成");
        return matchedStream;
    }

    /**
     * Attaches the load-prediction CEP pattern and the MAE/RMSE metric computation
     * to the matched event stream. Each alert and metric record is printed exactly
     * once via Flink's labeled print sink.
     *
     * @param eventStream matched true/predicted event pairs
     */
    private static void applyLoadPredictionCEP(DataStream<LoadPredictionEvent> eventStream) {
        logger.info("开始应用电力负荷预测CEP模式...");

        // Alert pattern: consecutive large prediction errors.
        LoadPredictionCEPService.applyLoadPredictionPattern(eventStream).print("告警");

        // MAE and RMSE quality metrics.
        LoadPredictionCEPService.calculateMetrics(eventStream).print("指标");

        logger.info("电力负荷预测CEP模式应用完成");
    }

}