package com.alarm.ecp;

import com.alarm.ecp.pojo.DeviceData;
import com.alarm.ecp.utils.MatchDemo;
import com.alibaba.fastjson2.JSONObject;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.api.java.functions.KeySelector;
import org.apache.flink.api.java.utils.ParameterTool;
import org.apache.flink.cep.CEP;
import org.apache.flink.cep.PatternSelectFunction;
import org.apache.flink.cep.PatternStream;
import org.apache.flink.cep.PatternTimeoutFunction;
import org.apache.flink.cep.functions.PatternProcessFunction;
import org.apache.flink.cep.pattern.Pattern;
import org.apache.flink.cep.pattern.conditions.IterativeCondition;
import org.apache.flink.streaming.api.TimeCharacteristic;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;
import org.apache.flink.util.Collector;
import org.apache.flink.util.OutputTag;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.File;
import java.time.Duration;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Properties;

/**
 * Administrative-division geo-fence alarm job (行政区域地理围栏告警):
 * raises enter/exit alarms when devices cross an administrative-region boundary.
 */
public class AdministrativeDivisionGeoFenceAlarmFlinkJob {
    private static final Logger logger = LoggerFactory.getLogger(AdministrativeDivisionGeoFenceAlarmFlinkJob.class);

    /**
     * Job entry point. Consumes device position events from Kafka, keys them by
     * device id, assigns event-time watermarks from {@code receive_time}, and runs
     * two CEP patterns against an administrative-division polygon fence resolved
     * via {@link MatchDemo#isPointInPolygon}:
     * <ul>
     *   <li><b>enter</b> — a point outside the polygon followed by a point inside,
     *       required to complete within 60 seconds (timeouts go to a side output);</li>
     *   <li><b>exit</b> — a point inside the polygon followed by a point outside.</li>
     * </ul>
     *
     * @param args command-line arguments; must contain {@code --configPath <file>}
     *             pointing to a properties file, and the full array is also forwarded
     *             to {@code MatchDemo.isPointInPolygon} to select the fence polygon
     * @throws Exception if the config file cannot be read or job execution fails
     */
    public static void main(String[] args) throws Exception {
        // "Enter fence" pattern: previous point outside the polygon, next point inside.
        // within(60s) bounds how long the whole match may take; once exceeded, the
        // partial match is routed to the timeout side output instead of matching.
        Pattern<DeviceData, DeviceData> enterPattern = Pattern.<DeviceData>begin("start").where(new IterativeCondition<DeviceData>() {
            @Override
            public boolean filter(DeviceData deviceData, Context<DeviceData> context) throws Exception {
                return !MatchDemo.isPointInPolygon(deviceData, args);
            }
        }).within(Time.seconds(60)).next("enter").where(new IterativeCondition<DeviceData>() {
            @Override
            public boolean filter(DeviceData deviceData, Context<DeviceData> context) throws Exception {
                return MatchDemo.isPointInPolygon(deviceData, args);
            }
        });

        // "Exit fence" pattern: previous point inside the polygon, next point outside.
        // No within() here — an exit match is valid regardless of elapsed time.
        Pattern<DeviceData, DeviceData> exitPattern = Pattern.<DeviceData>begin("start").where(new IterativeCondition<DeviceData>() {
            @Override
            public boolean filter(DeviceData deviceData, Context<DeviceData> context) throws Exception {
                return MatchDemo.isPointInPolygon(deviceData, args);
            }
        }).next("exit").where(new IterativeCondition<DeviceData>() {
            @Override
            public boolean filter(DeviceData deviceData, Context<DeviceData> context) throws Exception {
                return !MatchDemo.isPointInPolygon(deviceData, args);
            }
        });

        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        // Deprecated since Flink 1.12 (where event time is already the default);
        // kept for compatibility with older runtimes that default to processing time.
        env.setStreamTimeCharacteristic(TimeCharacteristic.EventTime);

        // The job's properties file path is taken from the command line.
        ParameterTool parameterTool = ParameterTool.fromArgs(args);
        String configFilePath = parameterTool.get("configPath");
        if (configFilePath == null || configFilePath.isEmpty()) {
            throw new IllegalArgumentException("Missing configPath parameter.");
        }
        ParameterTool config = ParameterTool.fromPropertiesFile(new File(configFilePath));

        // Kafka consumer configuration. Values are read from the loaded config file
        // when present and fall back to the previous hard-coded defaults, so existing
        // deployments without these keys keep the old behavior.
        Properties properties = new Properties();
        properties.setProperty("bootstrap.servers", config.get("kafka.bootstrap.servers", "localhost:9092"));
        properties.setProperty("group.id", config.get("kafka.group.id", "polygon-fence-group"));

        String topic = config.get("kafka.topic", "zhongji-qianyi-real-time-fence");
        FlinkKafkaConsumer<String> kafkaConsumer = new FlinkKafkaConsumer<>(topic, new SimpleStringSchema(), properties);
        // Start from the latest offsets: this is a real-time alarm, history is not replayed.
        kafkaConsumer.setStartFromLatest();

        DataStream<String> inputKafkaStream = env.addSource(kafkaConsumer);
        inputKafkaStream.printToErr();

        // Parse the JSON payload into DeviceData; drop unparseable records and
        // records without an area code (required for fence matching).
        DataStream<DeviceData> parsedDataStream = inputKafkaStream.map((MapFunction<String, DeviceData>) value -> {
            try {
                return JSONObject.parseObject(value, DeviceData.class);
            } catch (Exception e) {
                // Log payload and full stack trace so bad records can be diagnosed.
                logger.error("Failed to parse device data JSON: {}", value, e);
            }
            return null;
        }).filter(Objects::nonNull).filter(data -> Objects.nonNull(data._area_code));

        // Watermarks from the event's receive time, tolerating 10s of out-of-order
        // events; withIdleness keeps watermarks advancing when a partition goes quiet.
        DataStream<DeviceData> deviceDataStringKeyedStream = parsedDataStream
                .assignTimestampsAndWatermarks(WatermarkStrategy
                        .<DeviceData>forBoundedOutOfOrderness(Duration.ofSeconds(10))
                        .withIdleness(Duration.ofSeconds(10))
                        .withTimestampAssigner((event, timestamp) -> event.getReceive_time())).keyBy(
                        (KeySelector<DeviceData, Object>) DeviceData::get__deviceId__);

        PatternStream<DeviceData> enterPatternStream = CEP.pattern(deviceDataStringKeyedStream, enterPattern);
        PatternStream<DeviceData> exitPatternStream = CEP.pattern(deviceDataStringKeyedStream, exitPattern);

        // Side output for enter-pattern matches that did not complete within 60s.
        OutputTag<DeviceData> timeoutOutput = new OutputTag<DeviceData>("timeoutOutput") {
        };

        // Completed enter matches emit the "enter" event; timeouts emit the "start" event.
        SingleOutputStreamOperator<DeviceData> select = enterPatternStream.select(timeoutOutput, (PatternTimeoutFunction<DeviceData, DeviceData>) (map, l) -> map.get("start").get(0), (PatternSelectFunction<DeviceData, DeviceData>) map -> map.get("enter").get(0));
        // Exit matches emit the "start" event (the last position inside the fence).
        SingleOutputStreamOperator<DeviceData> process = exitPatternStream.process(new PatternProcessFunction<DeviceData, DeviceData>() {
            @Override
            public void processMatch(Map<String, List<DeviceData>> map, Context context, Collector<DeviceData> collector) throws Exception {
                collector.collect(map.get("start").get(0));
            }
        });

        DataStream<DeviceData> sideOutput = select.getSideOutput(timeoutOutput);
        select.printToErr(">>> 进围栏：");
        sideOutput.printToErr(">>> 超时：");
        process.printToErr(">>> 出围栏");

        env.execute("AdministrativeDivisionGeoFenceAlarmFlinkJob1");
    }
}
