package com.atliuzu.app.dwd.log;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONAware;
import com.alibaba.fastjson.JSONObject;
import com.atliuzu.utils.MyKafkaUtil;
import org.apache.flink.api.common.eventtime.SerializableTimestampAssigner;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.RichMapFunction;
import org.apache.flink.cep.*;
import org.apache.flink.cep.pattern.Pattern;
import org.apache.flink.cep.pattern.conditions.SimpleCondition;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.KeyedStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.ProcessFunction;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaProducer;
import org.apache.flink.util.Collector;
import org.apache.flink.util.OutputTag;

import java.util.List;
import java.util.Map;

/**
 * @Author : songyuan
 * @Description : Flink streaming job that detects "user jump" sessions (a single
 *                page view with no follow-up page within 10 seconds) from the
 *                dwd_traffic_page_log topic and writes the jump records to the
 *                dwd_traffic_user_jump_detail topic.
 * Date : 2022/8/19 15:26
 * @Version : 1.0
 */
public class DwdTrafficUserJumpDetail {

    /**
     * Entry point of the user-jump detection job.
     *
     * <p>Pipeline: read page logs from Kafka → parse to {@code JSONObject} →
     * assign event-time watermarks from {@code ts} → key by device id
     * ({@code common.mid}) → apply a CEP pattern that matches two consecutive
     * session-start events ({@code page.last_page_id == null}) within 10 s.
     * A matched "first" event, or a "first" event whose window times out with
     * no follow-up, is a jump; both are unioned and written back to Kafka.
     *
     * @param args unused command-line arguments
     * @throws Exception if job construction or execution fails
     */
    public static void main(String[] args) throws Exception {
        // TODO 1. Prepare the execution environment.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);

        // TODO 3. Read page-view logs from the dwd_traffic_page_log Kafka topic.
        String topic = "dwd_traffic_page_log";
        String groupId = "dwd_traffic_user_jump";
        FlinkKafkaConsumer<String> kafkaConsumer = MyKafkaUtil.getFlinkKafkaConsumer(topic, groupId);
        DataStreamSource<String> pageLog = env.addSource(kafkaConsumer);

        // TODO 4. Parse each record to a JSONObject, discarding malformed input.
        // FIX: the previous version only emitted events whose last_page_id was
        // null, which removed all follow-up page views from the stream. The CEP
        // pattern below needs the complete per-device event sequence to tell a
        // jump apart from normal navigation, so every well-formed event must be
        // forwarded; the pattern conditions do the last_page_id filtering.
        SingleOutputStreamOperator<JSONObject> pageObjDS = pageLog.process(new ProcessFunction<String, JSONObject>() {
            @Override
            public void processElement(String value, Context ctx, Collector<JSONObject> out) throws Exception {
                try {
                    out.collect(JSONObject.parseObject(value));
                } catch (Exception e) {
                    // Unparseable JSON: report and skip the record.
                    System.out.println("脏数据:" + value);
                }
            }
        });

        // TODO 5. Assign event-time timestamps and watermarks from the "ts" field.
        // NOTE(review): forMonotonousTimestamps assumes per-partition ordered
        // timestamps; if the topic can deliver out-of-order events, consider
        // forBoundedOutOfOrderness instead — confirm against the producer.
        SingleOutputStreamOperator<JSONObject> withWatermarkStream = pageObjDS.assignTimestampsAndWatermarks(
                WatermarkStrategy
                        .<JSONObject>forMonotonousTimestamps()
                        .withTimestampAssigner(
                                new SerializableTimestampAssigner<JSONObject>() {
                                    @Override
                                    public long extractTimestamp(JSONObject jsonObj, long recordTimestamp) {
                                        return jsonObj.getLong("ts");
                                    }
                                }
                        )
        );

        // TODO 6. Key by device id (mid) so each device is analysed independently.
        KeyedStream<JSONObject, String> keyedStream =
                withWatermarkStream.keyBy(jsonObj -> jsonObj.getJSONObject("common").getString("mid"));

        // TODO 7. CEP rule: two consecutive session-start events (last_page_id
        // == null) within 10 seconds. When the "second" start arrives, the
        // "first" event was a one-page session (a jump). When no qualifying
        // event follows within the window, the timeout branch also marks the
        // "first" event as a jump.
        Pattern<JSONObject, JSONObject> pattern = Pattern.<JSONObject>begin("first").where(
                new SimpleCondition<JSONObject>() {
                    @Override
                    public boolean filter(JSONObject jsonObj) throws Exception {
                        // Session start: no previous page.
                        String lastPageId = jsonObj.getJSONObject("page").getString("last_page_id");
                        return lastPageId == null;
                    }
                }
        ).next("second").where(
                new SimpleCondition<JSONObject>() {
                    @Override
                    public boolean filter(JSONObject jsonObj) throws Exception {
                        // Next event is also a session start → previous session had one page.
                        String lastPageId = jsonObj.getJSONObject("page").getString("last_page_id");
                        return lastPageId == null;
                    }
                }
        ).within(Time.seconds(10));

        // TODO 8. Apply the pattern to the keyed stream.
        PatternStream<JSONObject> patternStream = CEP.pattern(keyedStream, pattern);

        // TODO 9. Extract matched events; timed-out partial matches go to a side output.
        OutputTag<JSONObject> timeoutTag = new OutputTag<JSONObject>("timeoutTag") {
        };
        SingleOutputStreamOperator<JSONObject> flatSelectStream = patternStream.flatSelect(
                timeoutTag,
                new PatternFlatTimeoutFunction<JSONObject, JSONObject>() {
                    @Override
                    public void timeout(Map<String, List<JSONObject>> pattern, long timeoutTimestamp, Collector<JSONObject> out) throws Exception {
                        // No follow-up within the window: the lone start is a jump.
                        out.collect(pattern.get("first").get(0));
                    }
                },
                new PatternFlatSelectFunction<JSONObject, JSONObject>() {
                    @Override
                    public void flatSelect(Map<String, List<JSONObject>> pattern, Collector<JSONObject> out) throws Exception {
                        // Full match: the "first" start was immediately followed
                        // by another start, so it is a jump.
                        out.collect(pattern.get("first").get(0));
                    }
                }
        );

        DataStream<JSONObject> timeOutDStream = flatSelectStream.getSideOutput(timeoutTag);

        // TODO 11. Union both jump sources and sink to Kafka.
        flatSelectStream.print("flat");
        timeOutDStream.print("timeout");

        DataStream<JSONObject> unionDStream = flatSelectStream.union(timeOutDStream);
        String targetTopic = "dwd_traffic_user_jump_detail";
        FlinkKafkaProducer<String> kafkaProducer = MyKafkaUtil.getFlinkKafkaProducer(targetTopic);
        unionDStream.map(JSONAware::toJSONString)
                .addSink(kafkaProducer);

        env.execute();
    }

}
